Oct 08 07:16:53 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 08 07:16:53 crc restorecon[4672]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
system_u:object_r:container_file_t:s0:c0,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 08 07:16:53 crc restorecon[4672]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:53 crc restorecon[4672]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 08 07:16:53 crc 
restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 08 07:16:53 crc restorecon[4672]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 08 07:16:53 crc restorecon[4672]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 08 07:16:53 crc 
restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:53 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:53 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 
07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 08 07:16:54 crc 
restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 
07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 08 07:16:54 crc restorecon[4672]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 08 07:16:54 crc restorecon[4672]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Oct 08 07:16:55 crc kubenswrapper[4693]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 08 07:16:55 crc kubenswrapper[4693]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Oct 08 07:16:55 crc kubenswrapper[4693]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 08 07:16:55 crc kubenswrapper[4693]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 08 07:16:55 crc kubenswrapper[4693]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Oct 08 07:16:55 crc kubenswrapper[4693]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.097893 4693 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104017 4693 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104048 4693 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104058 4693 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104068 4693 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104076 4693 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104083 4693 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104091 4693 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104099 4693 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104106 4693 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104114 4693 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104121 4693 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104132 4693 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
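The "Flag ... has been deprecated" warnings a few lines up name the replacement mechanism themselves: apart from --pod-infra-container-image and --minimum-container-ttl-duration (whose warnings give different advice), each deprecated flag has a counterpart field in the KubeletConfiguration file handed to the kubelet via --config. A minimal sketch of such a file follows; the field names are the upstream KubeletConfiguration ones, while the concrete values (socket path, taint, reservations, eviction threshold) are illustrative assumptions, not the values this node actually runs with:

    apiVersion: kubelet.config.k8s.io/v1beta1
    kind: KubeletConfiguration
    # replaces --container-runtime-endpoint (CRI-O socket assumed here)
    containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
    # replaces --volume-plugin-dir
    volumePluginDir: /etc/kubernetes/kubelet-plugins/volume/exec
    # replaces --register-with-taints (illustrative taint)
    registerWithTaints:
    - key: node-role.kubernetes.io/master
      effect: NoSchedule
    # replaces --system-reserved (illustrative reservations)
    systemReserved:
      cpu: 500m
      memory: 1Gi
    # --minimum-container-ttl-duration has no field of its own; its warning
    # points at eviction thresholds instead (assumed value)
    evictionHard:
      memory.available: 100Mi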
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104142 4693 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104151 4693 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104159 4693 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104168 4693 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104176 4693 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104185 4693 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104193 4693 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104201 4693 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104208 4693 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104216 4693 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104234 4693 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104242 4693 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104250 4693 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104259 4693 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104269 4693 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104278 4693 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104288 4693 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104297 4693 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104311 4693 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104324 4693 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104334 4693 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104344 4693 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104357 4693 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
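Each gate named in these warnings comes from OpenShift's feature-gate list rather than from the upstream kubelet, which is why feature_gate.go:330 reports it as unrecognized; the same enumeration is emitted several more times below with fresh timestamps, in a different order each pass. A small tally, sketched below under the same illustrative "kubelet.log" path assumption, collapses the repeats into one sorted set with a count per gate:

import re
from collections import Counter

GATE = re.compile(r"unrecognized feature gate: (\S+)")

def unrecognized_gates(path: str) -> Counter:
    """Count how often each unrecognized gate is warned about."""
    counts: Counter = Counter()
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            counts.update(GATE.findall(line))
    return counts

if __name__ == "__main__":
    for gate, n in sorted(unrecognized_gates("kubelet.log").items()):
        print(f"{gate}: seen {n}x")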
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104370 4693 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104381 4693 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104393 4693 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104403 4693 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104413 4693 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104422 4693 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104434 4693 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104443 4693 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104458 4693 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104469 4693 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104479 4693 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104488 4693 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104497 4693 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104506 4693 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104515 4693 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104525 4693 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104534 4693 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104543 4693 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104552 4693 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104561 4693 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104571 4693 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104582 4693 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104592 4693 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104603 4693 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104615 4693 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104624 4693 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104634 4693 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104643 4693 feature_gate.go:330] unrecognized feature gate: Example
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104653 4693 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104661 4693 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104671 4693 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104681 4693 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104690 4693 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104699 4693 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104708 4693 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.104718 4693 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105014 4693 flags.go:64] FLAG: --address="0.0.0.0"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105040 4693 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105455 4693 flags.go:64] FLAG: --anonymous-auth="true"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105471 4693 flags.go:64] FLAG: --application-metrics-count-limit="100"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105485 4693 flags.go:64] FLAG: --authentication-token-webhook="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105497 4693 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105511 4693 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105529 4693 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105540 4693 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105574 4693 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105586 4693 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105597 4693 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105608 4693 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105619 4693 flags.go:64] FLAG: --cgroup-root=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105630 4693 flags.go:64] FLAG: --cgroups-per-qos="true"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105641 4693 flags.go:64] FLAG: --client-ca-file=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105652 4693 flags.go:64] FLAG: --cloud-config=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105664 4693 flags.go:64] FLAG: --cloud-provider=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105674 4693 flags.go:64] FLAG: --cluster-dns="[]"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105696 4693 flags.go:64] FLAG: --cluster-domain=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105707 4693 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105720 4693 flags.go:64] FLAG: --config-dir=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105730 4693 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105743 4693 flags.go:64] FLAG: --container-log-max-files="5"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105756 4693 flags.go:64] FLAG: --container-log-max-size="10Mi"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105768 4693 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105780 4693 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105792 4693 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105803 4693 flags.go:64] FLAG: --contention-profiling="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105846 4693 flags.go:64] FLAG: --cpu-cfs-quota="true"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105858 4693 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105889 4693 flags.go:64] FLAG: --cpu-manager-policy="none"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105903 4693 flags.go:64] FLAG: --cpu-manager-policy-options=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105917 4693 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105928 4693 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105939 4693 flags.go:64] FLAG: --enable-debugging-handlers="true"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105950 4693 flags.go:64] FLAG: --enable-load-reader="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105962 4693 flags.go:64] FLAG: --enable-server="true"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105973 4693 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.105992 4693 flags.go:64] FLAG: --event-burst="100"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106004 4693 flags.go:64] FLAG: --event-qps="50"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106015 4693 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106027 4693 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106038 4693 flags.go:64] FLAG: --eviction-hard=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106052 4693 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106063 4693 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106075 4693 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106086 4693 flags.go:64] FLAG: --eviction-soft=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106100 4693 flags.go:64] FLAG: --eviction-soft-grace-period=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106113 4693 flags.go:64] FLAG: --exit-on-lock-contention="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106124 4693 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106136 4693 flags.go:64] FLAG: --experimental-mounter-path=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106147 4693 flags.go:64] FLAG: --fail-cgroupv1="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106158 4693 flags.go:64] FLAG: --fail-swap-on="true"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106169 4693 flags.go:64] FLAG: --feature-gates=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106183 4693 flags.go:64] FLAG: --file-check-frequency="20s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106197 4693 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106209 4693 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106222 4693 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106234 4693 flags.go:64] FLAG: --healthz-port="10248"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106245 4693 flags.go:64] FLAG: --help="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106257 4693 flags.go:64] FLAG: --hostname-override=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106267 4693 flags.go:64] FLAG: --housekeeping-interval="10s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106279 4693 flags.go:64] FLAG: --http-check-frequency="20s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106291 4693 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106302 4693 flags.go:64] FLAG: --image-credential-provider-config=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106312 4693 flags.go:64] FLAG: --image-gc-high-threshold="85"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106324 4693 flags.go:64] FLAG: --image-gc-low-threshold="80"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106336 4693 flags.go:64] FLAG: --image-service-endpoint=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106348 4693 flags.go:64] FLAG: --kernel-memcg-notification="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106359 4693 flags.go:64] FLAG: --kube-api-burst="100"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106370 4693 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106382 4693 flags.go:64] FLAG: --kube-api-qps="50"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106394 4693 flags.go:64] FLAG: --kube-reserved=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106406 4693 flags.go:64] FLAG: --kube-reserved-cgroup=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106416 4693 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106428 4693 flags.go:64] FLAG: --kubelet-cgroups=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106439 4693 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106451 4693 flags.go:64] FLAG: --lock-file=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106465 4693 flags.go:64] FLAG: --log-cadvisor-usage="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106477 4693 flags.go:64] FLAG: --log-flush-frequency="5s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106489 4693 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106507 4693 flags.go:64] FLAG: --log-json-split-stream="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106518 4693 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106530 4693 flags.go:64] FLAG: --log-text-split-stream="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106541 4693 flags.go:64] FLAG: --logging-format="text"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106551 4693 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106563 4693 flags.go:64] FLAG: --make-iptables-util-chains="true"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106575 4693 flags.go:64] FLAG: --manifest-url=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106587 4693 flags.go:64] FLAG: --manifest-url-header=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106602 4693 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106614 4693 flags.go:64] FLAG: --max-open-files="1000000"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106627 4693 flags.go:64] FLAG: --max-pods="110"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106639 4693 flags.go:64] FLAG: --maximum-dead-containers="-1"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106651 4693 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106662 4693 flags.go:64] FLAG: --memory-manager-policy="None"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106672 4693 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106684 4693 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106696 4693 flags.go:64] FLAG: --node-ip="192.168.126.11"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106707 4693 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106732 4693 flags.go:64] FLAG: --node-status-max-images="50"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106743 4693 flags.go:64] FLAG: --node-status-update-frequency="10s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106756 4693 flags.go:64] FLAG: --oom-score-adj="-999"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106767 4693 flags.go:64] FLAG: --pod-cidr=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106778 4693 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106797 4693 flags.go:64] FLAG: --pod-manifest-path=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106808 4693 flags.go:64] FLAG: --pod-max-pids="-1"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106857 4693 flags.go:64] FLAG: --pods-per-core="0"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106869 4693 flags.go:64] FLAG: --port="10250"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106883 4693 flags.go:64] FLAG: --protect-kernel-defaults="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106896 4693 flags.go:64] FLAG: --provider-id=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106907 4693 flags.go:64] FLAG: --qos-reserved=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106918 4693 flags.go:64] FLAG: --read-only-port="10255"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106931 4693 flags.go:64] FLAG: --register-node="true"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106942 4693 flags.go:64] FLAG: --register-schedulable="true"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106955 4693 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106975 4693 flags.go:64] FLAG: --registry-burst="10"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106986 4693 flags.go:64] FLAG: --registry-qps="5"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.106997 4693 flags.go:64] FLAG: --reserved-cpus=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107007 4693 flags.go:64] FLAG: --reserved-memory=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107021 4693 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107032 4693 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107044 4693 flags.go:64] FLAG: --rotate-certificates="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107054 4693 flags.go:64] FLAG: --rotate-server-certificates="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107065 4693 flags.go:64] FLAG: --runonce="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107076 4693 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107088 4693 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107100 4693 flags.go:64] FLAG: --seccomp-default="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107111 4693 flags.go:64] FLAG: --serialize-image-pulls="true"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107122 4693 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107134 4693 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107145 4693 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107156 4693 flags.go:64] FLAG: --storage-driver-password="root"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107167 4693 flags.go:64] FLAG: --storage-driver-secure="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107178 4693 flags.go:64] FLAG: --storage-driver-table="stats"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107189 4693 flags.go:64] FLAG: --storage-driver-user="root"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107201 4693 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107212 4693 flags.go:64] FLAG: --sync-frequency="1m0s"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107224 4693 flags.go:64] FLAG: --system-cgroups=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107235 4693 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107252 4693 flags.go:64] FLAG: --system-reserved-cgroup=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107264 4693 flags.go:64] FLAG: --tls-cert-file=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107275 4693 flags.go:64] FLAG: --tls-cipher-suites="[]"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107290 4693 flags.go:64] FLAG: --tls-min-version=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107301 4693 flags.go:64] FLAG: --tls-private-key-file=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107315 4693 flags.go:64] FLAG: --topology-manager-policy="none"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107327 4693 flags.go:64] FLAG: --topology-manager-policy-options=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107338 4693 flags.go:64] FLAG: --topology-manager-scope="container"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107350 4693 flags.go:64] FLAG: --v="2"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107364 4693 flags.go:64] FLAG: --version="false"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107377 4693 flags.go:64] FLAG: --vmodule=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107390 4693 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.107403 4693 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107665 4693 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107681 4693 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107693 4693 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107703 4693 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107714 4693 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107723 4693 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107733 4693 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107742 4693 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107751 4693 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107762 4693 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107772 4693 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107781 4693 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107791 4693 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107800 4693 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107810 4693 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107855 4693 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107868 4693 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107881 4693 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107893 4693 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107903 4693 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107913 4693 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107927 4693 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107939 4693 feature_gate.go:330] unrecognized feature gate: Example
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107948 4693 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107959 4693 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107970 4693 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107980 4693 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.107989 4693 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108001 4693 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108011 4693 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108021 4693 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108030 4693 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108040 4693 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108049 4693 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108059 4693 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108069 4693 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108078 4693 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108088 4693 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108097 4693 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108107 4693 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108117 4693 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108127 4693 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108140 4693 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108154 4693 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108166 4693 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108177 4693 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108188 4693 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108199 4693 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108209 4693 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108221 4693 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108233 4693 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108243 4693 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108253 4693 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108263 4693 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108280 4693 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108289 4693 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108299 4693 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108309 4693 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108318 4693 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108328 4693 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108337 4693 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108347 4693 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108357 4693 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108366 4693 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108379 4693 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108390 4693 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108400 4693 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108412 4693 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108421 4693 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108430 4693 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.108440 4693 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.109222 4693 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.122070 4693 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.122133 4693 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122259 4693 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122272 4693 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122281 4693 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122290 4693 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122299 4693 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122311 4693 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
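The flags.go:64 dump above records the effective value of every command-line flag, one FLAG: --name="value" entry per line, which makes it straightforward to diff two nodes' startup configurations. A sketch of a parser follows (same illustrative "kubelet.log" path; values are kept as raw strings such as "[]", "true", or "10s"):

import re

# Matches entries like: FLAG: --config="/etc/kubernetes/kubelet.conf"
FLAG = re.compile(r'FLAG: (--[\w-]+)="(.*?)"')

def flag_dump(path: str) -> dict[str, str]:
    """Fold a kubelet startup log's FLAG dump into a name -> value dict."""
    flags: dict[str, str] = {}
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            for name, value in FLAG.findall(line):
                flags[name] = value
    return flags

if __name__ == "__main__":
    flags = flag_dump("kubelet.log")
    print(flags.get("--config"), flags.get("--container-runtime-endpoint"))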
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122325 4693 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122334 4693 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122343 4693 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122353 4693 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122361 4693 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122370 4693 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122378 4693 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122387 4693 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122395 4693 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122406 4693 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122415 4693 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122424 4693 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122432 4693 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122440 4693 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122448 4693 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122455 4693 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122463 4693 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122471 4693 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122480 4693 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122488 4693 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122496 4693 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122504 4693 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122511 4693 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122520 4693 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122528 4693 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122536 4693 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122544 4693 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122552 4693 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122564 4693 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122575 4693 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122583 4693 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122592 4693 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122600 4693 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122608 4693 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122616 4693 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122624 4693 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122631 4693 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122639 4693 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122648 4693 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122655 4693 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122663 4693 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122671 4693 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122679 4693 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122687 4693 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122697 4693 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
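In this archived copy, runs of consecutive journal entries were fused onto single physical lines. Since every entry starts with a syslog-style "Mon DD HH:MM:SS host process[pid]:" prefix, a zero-width split in front of that prefix recovers the one-entry-per-line layout; a minimal sketch under that prefix assumption:

import re

# Split *before* each syslog-style timestamp prefix, without consuming it.
PREFIX = re.compile(r"(?=(?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) "
                    r"[ \d]\d \d{2}:\d{2}:\d{2} \S+ \S+\[\d+\]: )")

def resplit(chunk: str) -> list[str]:
    """Break a fused log chunk back into individual journal entries."""
    return [part.strip() for part in PREFIX.split(chunk) if part.strip()]

sample = ("Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 ... gate: A "
          "Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 ... gate: B")
for entry in resplit(sample):
    print(entry)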
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122707 4693 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122715 4693 feature_gate.go:330] unrecognized feature gate: Example
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122723 4693 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122732 4693 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122741 4693 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122749 4693 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122758 4693 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122766 4693 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122775 4693 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122784 4693 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122793 4693 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122801 4693 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122809 4693 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122855 4693 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122882 4693 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122890 4693 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122897 4693 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122905 4693 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122913 4693 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.122922 4693 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.122935 4693 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123187 4693 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123201 4693 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123211 4693 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123220 4693 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123229 4693 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123237 4693 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123245 4693 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123252 4693 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123261 4693 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123269 4693 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123277 4693 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123285 4693 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123292 4693 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123300 4693 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123308 4693 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123315 4693 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123323 4693 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123332 4693 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123340 4693 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123349 4693 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123356 4693 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123364 4693 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123374 4693 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
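The feature_gate.go:386 lines above print the effective gate map in Go's map syntax, which is awkward to compare by eye. Parsing it into a proper mapping makes the enabled set (CloudDualStackNodeIPs, KMSv1, ValidatingAdmissionPolicy, DisableKubeletCloudCredentialProviders) easy to extract; a sketch, assuming gate names never contain a colon and values are always true/false:

import re

MAP_LINE = re.compile(r"feature gates: \{map\[(.*?)\]\}")

def parse_gates(line: str) -> dict[str, bool]:
    """Parse a kubelet 'feature gates: {map[Name:bool ...]}' log line."""
    match = MAP_LINE.search(line)
    if not match:
        raise ValueError("not a feature-gates line")
    gates: dict[str, bool] = {}
    for pair in match.group(1).split():
        name, _, value = pair.partition(":")
        gates[name] = value == "true"
    return gates

sample = ("feature gates: {map[CloudDualStackNodeIPs:true "
          "KMSv1:true NodeSwap:false ValidatingAdmissionPolicy:true]}")
print(sorted(name for name, on in parse_gates(sample).items() if on))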
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123384 4693 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123394 4693 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123402 4693 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123410 4693 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123420 4693 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123430 4693 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123438 4693 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123446 4693 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123453 4693 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123461 4693 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123469 4693 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123478 4693 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123489 4693 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123499 4693 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123507 4693 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123515 4693 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123524 4693 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123532 4693 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123540 4693 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123547 4693 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123555 4693 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123562 4693 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123570 4693 feature_gate.go:330] unrecognized feature gate: Example
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123580 4693 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123589 4693 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123597 4693 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123606 4693 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123614 4693 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123622 4693 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123630 4693 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123637 4693 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123645 4693 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123653 4693 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123661 4693 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123669 4693 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123676 4693 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123684 4693 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123692 4693 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123700 4693 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123707 4693 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123715 4693 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123723 4693 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123731 4693 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123738 4693 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123746 4693 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123753 4693 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123761 4693 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.123769 4693 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.123781 4693 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.125280 4693 server.go:940] "Client rotation is on, will bootstrap in background"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.130770 4693 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.130928 4693 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.134167 4693 server.go:997] "Starting client certificate rotation"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.134216 4693 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.134491 4693 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-26 09:02:08.296914105 +0000 UTC
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.134612 4693 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1177h45m13.162307025s for next certificate rotation
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.160692 4693 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.163395 4693 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.179678 4693 log.go:25] "Validated CRI v1 runtime API"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.216697 4693 log.go:25] "Validated CRI v1 image API"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.219070 4693 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.227661 4693 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-08-07-12-20-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.227712 4693 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.257043 4693 manager.go:217] Machine: {Timestamp:2025-10-08 07:16:55.252419625 +0000 UTC m=+0.623384560 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:553e2573-3cef-46f4-a622-139b36e277cd BootID:c1373c45-8137-4925-975b-395eb7ced486 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:bf:03:29 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:bf:03:29 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:a7:92:a0 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:e1:21:86 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:bc:93:d7 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:a3:80:ea Speed:-1 Mtu:1496} {Name:eth10 MacAddress:36:3c:3a:27:64:e5 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:ce:87:9a:d0:0c:6d Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.257239 4693 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.257359 4693 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.257616 4693 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.257762 4693 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.257803 4693 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.258053 4693 topology_manager.go:138] "Creating topology manager with none policy"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.258063 4693 container_manager_linux.go:303] "Creating device plugin manager"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.258932 4693 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.258960 4693 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.259088 4693 state_mem.go:36] "Initialized new in-memory state store"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.259153 4693 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.264917 4693 kubelet.go:418] "Attempting to sync node with API server"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.264937 4693 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.264951 4693 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.264962 4693 kubelet.go:324] "Adding apiserver pod source"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.264979 4693 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.270510 4693 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.271231 4693 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.273769 4693 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.274401 4693 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused
Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.274569 4693 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.166:6443: connect: connection refused" logger="UnhandledError"
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.274398 4693 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused
Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.274653 4693 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.166:6443: connect: connection refused" logger="UnhandledError"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275083 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275104 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275111 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275118 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275130 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275137 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275144 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275156 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275165 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275174 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275184 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275191 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.275928 4693 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.277436 4693 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.277484 4693 server.go:1280] "Started kubelet"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.283229 4693 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.283223 4693 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.284385 4693 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Oct 08 07:16:55 crc systemd[1]: Started Kubernetes Kubelet.
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.286609 4693 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.286636 4693 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.286673 4693 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 17:51:20.663714993 +0000 UTC
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.286710 4693 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1234h34m25.377008073s for next certificate rotation
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.287003 4693 volume_manager.go:287] "The desired_state_of_world populator starts"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.287059 4693 volume_manager.go:289] "Starting Kubelet Volume Manager"
Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.287239 4693 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.287505 4693 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.289869 4693 factory.go:55] Registering systemd factory
Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.294207 4693 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.166:6443: connect: connection refused" interval="200ms"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.294977 4693 factory.go:221] Registration of the systemd container factory successfully
Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.295730 4693 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused
Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.295900 4693 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.166:6443: connect: connection refused" logger="UnhandledError"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.300663 4693 factory.go:153] Registering CRI-O factory
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.300685 4693 factory.go:221] Registration of the crio container factory successfully
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.300755 4693 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.300785 4693 factory.go:103] Registering Raw factory
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.300805 4693 manager.go:1196] Started watching for new ooms in manager
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.301600 4693 manager.go:319] Starting recovery of all containers
Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.300088 4693 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.166:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186c72d5670bf51b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-08 07:16:55.277458715 +0000 UTC m=+0.648423650,LastTimestamp:2025-10-08 07:16:55.277458715 +0000 UTC m=+0.648423650,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.302648 4693 server.go:460] "Adding debug handlers to kubelet server"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306635 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306733 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306757 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306776 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306803 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306846 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306864 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306881 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306900 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306918 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306938 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306956 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306976 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.306996 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307016 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307033 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307053 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307070 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307089 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307106 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307123 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307141 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307159 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307177 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307197 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307214 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307234 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307253 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307273 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307290 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307307 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307324 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307345 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307362 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307380 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307397 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307414 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307431 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307448 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307466 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307483 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307500 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307519 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307537 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307554 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307630 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307649 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.307667 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.308950 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.308983 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309011 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309040 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309075 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309107 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309133 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309165 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309192 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309223 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309251 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309279 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309304 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309330 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309353 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309379 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309408 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309433 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309463 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309490 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309518 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309543 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309568 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309593 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309622 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309647 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309672 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309698 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309722 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309748 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309772 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309797 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309866 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309892 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309920 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309947 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.309974 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310046 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310072 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310098 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310125 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310150 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310175 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310265 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310299 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310324 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310348 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310372 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310412 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310477 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310501 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310527 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310550 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310573 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310596 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310621 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310655 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310682 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310708 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310732 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310804 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310862 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310890 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310918 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310948 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.310973 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.311001 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.311027 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.311050 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.311077 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.311103 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.311130 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.311155 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.311180 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.311202 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.311227 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314522 4693 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314614 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314649 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314693 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314722 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314748 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314783 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314809 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314877 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314903 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314930 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314964 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.314991 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315017 4693 reconstruct.go:130] "Volume is marked as uncertain and
added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315052 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315101 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315138 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315166 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315194 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315234 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315263 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315302 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315403 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315433 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315474 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315504 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315540 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315567 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315609 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315655 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315683 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315723 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315753 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315780 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315861 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315900 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315932 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.315976 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316009 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316052 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316083 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316113 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316154 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316181 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316220 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316323 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316355 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316391 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316420 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316461 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316489 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316522 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316560 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316588 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316624 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316650 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316680 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316717 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316744 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316778 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316807 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316881 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316925 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316954 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.316990 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.317019 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.317046 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.317088 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.317114 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.317154 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.317190 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.317219 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319166 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319258 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319303 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319331 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319370 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319398 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319423 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319487 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319520 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319556 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319621 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319707 4693 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319730 4693 reconstruct.go:97] "Volume reconstruction finished" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.319744 4693 reconciler.go:26] "Reconciler: start to sync state" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.320014 4693 manager.go:324] Recovery completed Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.343239 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.345629 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.345691 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.345711 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.346793 4693 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.346850 4693 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.346878 4693 state_mem.go:36] "Initialized new in-memory state store" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.357970 4693 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.359519 4693 policy_none.go:49] "None policy: Start" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.360899 4693 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.360941 4693 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.360974 4693 state_mem.go:35] "Initializing new in-memory state store" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.361508 4693 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.361563 4693 kubelet.go:2335] "Starting kubelet main sync loop" Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.361650 4693 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.362271 4693 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.362350 4693 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.166:6443: connect: connection refused" logger="UnhandledError" Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.387570 4693 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.411369 4693 manager.go:334] "Starting Device Plugin manager" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.411468 4693 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.411490 4693 server.go:79] "Starting device plugin registration server" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.412174 4693 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.412205 4693 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.412584 4693 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.412738 4693 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.412761 4693 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.423970 4693 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.462770 4693 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.462918 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.464413 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.464517 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.464542 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.464783 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.465028 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.465071 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.466149 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.466188 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.466199 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.466332 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.466514 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.466556 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.467266 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.467309 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.467332 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.467353 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.467332 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.467395 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.467401 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.467419 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.467441 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.467642 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.467699 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.467733 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.469144 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.469181 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.469193 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.469431 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.469485 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.469504 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.469728 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.469963 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.470035 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.471106 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.471151 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.471166 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.471314 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.471353 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.471370 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.471440 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.471482 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.472868 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.473537 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.473568 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.496597 4693 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.166:6443: connect: connection refused" interval="400ms" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.514122 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.515409 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.515453 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.515470 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.515507 4693 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.516203 4693 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.166:6443: connect: connection refused" node="crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.525974 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.526042 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.526083 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.526140 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.526172 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.526203 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.526306 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.526442 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.526546 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.526787 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.526887 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.526952 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.526983 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.527045 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.527077 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628162 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628228 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628261 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628291 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628323 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628354 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628383 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628412 4693 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628451 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628459 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628488 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628495 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628499 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628554 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628557 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628610 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628620 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc 
kubenswrapper[4693]: I1008 07:16:55.628403 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628617 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628607 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628732 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628749 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628774 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628860 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628869 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628905 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628950 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628990 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.629076 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.628906 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.717193 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.719605 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.719675 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.719695 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.719737 4693 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.720409 4693 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.166:6443: connect: connection refused" node="crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.824179 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.844869 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.875646 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.884392 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-991cc637541e1a2c5c07c3463e5a791e99339de931c2cdc7f1636ef94bd01af8 WatchSource:0}: Error finding container 991cc637541e1a2c5c07c3463e5a791e99339de931c2cdc7f1636ef94bd01af8: Status 404 returned error can't find the container with id 991cc637541e1a2c5c07c3463e5a791e99339de931c2cdc7f1636ef94bd01af8 Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.892109 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-f889941460f24249b5486c8548eadba585bcdbdb63bc0872232c80322581f31e WatchSource:0}: Error finding container f889941460f24249b5486c8548eadba585bcdbdb63bc0872232c80322581f31e: Status 404 returned error can't find the container with id f889941460f24249b5486c8548eadba585bcdbdb63bc0872232c80322581f31e Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.894110 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: E1008 07:16:55.898076 4693 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.166:6443: connect: connection refused" interval="800ms" Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.905008 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-b5fc5a739ebe6edf107e620fb13d40e80eeea7a47216ddcebd827db863007a93 WatchSource:0}: Error finding container b5fc5a739ebe6edf107e620fb13d40e80eeea7a47216ddcebd827db863007a93: Status 404 returned error can't find the container with id b5fc5a739ebe6edf107e620fb13d40e80eeea7a47216ddcebd827db863007a93 Oct 08 07:16:55 crc kubenswrapper[4693]: I1008 07:16:55.906458 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.926430 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-8ef6cbab389ffe78e357fcb683e90a9faa4b15430aec9524562dc81f814707b2 WatchSource:0}: Error finding container 8ef6cbab389ffe78e357fcb683e90a9faa4b15430aec9524562dc81f814707b2: Status 404 returned error can't find the container with id 8ef6cbab389ffe78e357fcb683e90a9faa4b15430aec9524562dc81f814707b2 Oct 08 07:16:55 crc kubenswrapper[4693]: W1008 07:16:55.937679 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-495481b4a62e34e2f9269b429361dd5985458e637d58fe6e7fbcef43acd064a5 WatchSource:0}: Error finding container 495481b4a62e34e2f9269b429361dd5985458e637d58fe6e7fbcef43acd064a5: Status 404 returned error can't find the container with id 495481b4a62e34e2f9269b429361dd5985458e637d58fe6e7fbcef43acd064a5 Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.120761 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.122388 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.122480 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.122499 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.122539 4693 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 08 07:16:56 crc kubenswrapper[4693]: E1008 07:16:56.123145 4693 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.166:6443: connect: connection refused" node="crc" Oct 08 07:16:56 crc kubenswrapper[4693]: W1008 07:16:56.131745 4693 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused Oct 08 07:16:56 crc kubenswrapper[4693]: E1008 07:16:56.131879 4693 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.166:6443: connect: connection refused" logger="UnhandledError" Oct 08 07:16:56 crc kubenswrapper[4693]: W1008 07:16:56.185877 4693 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused Oct 08 07:16:56 crc kubenswrapper[4693]: E1008 07:16:56.185984 4693 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get 
\"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.166:6443: connect: connection refused" logger="UnhandledError" Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.278549 4693 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.368425 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8ef6cbab389ffe78e357fcb683e90a9faa4b15430aec9524562dc81f814707b2"} Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.370586 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"b5fc5a739ebe6edf107e620fb13d40e80eeea7a47216ddcebd827db863007a93"} Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.372152 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"991cc637541e1a2c5c07c3463e5a791e99339de931c2cdc7f1636ef94bd01af8"} Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.373905 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f889941460f24249b5486c8548eadba585bcdbdb63bc0872232c80322581f31e"} Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.374993 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"495481b4a62e34e2f9269b429361dd5985458e637d58fe6e7fbcef43acd064a5"} Oct 08 07:16:56 crc kubenswrapper[4693]: E1008 07:16:56.699348 4693 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.166:6443: connect: connection refused" interval="1.6s" Oct 08 07:16:56 crc kubenswrapper[4693]: W1008 07:16:56.726293 4693 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused Oct 08 07:16:56 crc kubenswrapper[4693]: E1008 07:16:56.726396 4693 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.166:6443: connect: connection refused" logger="UnhandledError" Oct 08 07:16:56 crc kubenswrapper[4693]: W1008 07:16:56.835499 4693 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused Oct 08 07:16:56 crc kubenswrapper[4693]: E1008 07:16:56.835619 4693 
reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.166:6443: connect: connection refused" logger="UnhandledError" Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.924106 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.925743 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.925778 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.925789 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:56 crc kubenswrapper[4693]: I1008 07:16:56.925844 4693 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 08 07:16:56 crc kubenswrapper[4693]: E1008 07:16:56.926319 4693 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.166:6443: connect: connection refused" node="crc" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.278335 4693 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.380740 4693 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161" exitCode=0 Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.380882 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161"} Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.380910 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.382685 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.382725 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.382742 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.384269 4693 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77" exitCode=0 Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.384322 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77"} Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.384433 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.386321 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.386388 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.386413 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.386919 4693 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6a18ea6cc196ee8df94b29ebc1fb2a1ee8a8123cf49478f8f9655a3f4caea5e9" exitCode=0 Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.386944 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6a18ea6cc196ee8df94b29ebc1fb2a1ee8a8123cf49478f8f9655a3f4caea5e9"} Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.387204 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.388687 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.388748 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.388765 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.389202 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.389980 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.390010 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.390021 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.390069 4693 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="bf1c6feb01dded41617cbf1e40114c9efa9cebf177f926365b1e2c1fd9bb7803" exitCode=0 Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.390139 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"bf1c6feb01dded41617cbf1e40114c9efa9cebf177f926365b1e2c1fd9bb7803"} Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.390250 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.391322 4693 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.391361 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.391376 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.394700 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415"} Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.394749 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5"} Oct 08 07:16:57 crc kubenswrapper[4693]: I1008 07:16:57.394776 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119"} Oct 08 07:16:58 crc kubenswrapper[4693]: W1008 07:16:58.135320 4693 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused Oct 08 07:16:58 crc kubenswrapper[4693]: E1008 07:16:58.135401 4693 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.166:6443: connect: connection refused" logger="UnhandledError" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.278157 4693 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused Oct 08 07:16:58 crc kubenswrapper[4693]: E1008 07:16:58.300625 4693 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.166:6443: connect: connection refused" interval="3.2s" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.402252 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32"} Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.402299 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5"} Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.402312 4693 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441"} Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.402327 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.403718 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.403785 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.403809 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.407970 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4"} Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.408003 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471"} Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.408016 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3"} Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.408029 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da"} Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.410329 4693 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="ebf3c060617e71b41d695d7fc56b39c03ab17791dfee8f0b8abfb2197eda0336" exitCode=0 Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.410401 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"ebf3c060617e71b41d695d7fc56b39c03ab17791dfee8f0b8abfb2197eda0336"} Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.410479 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.411645 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.411721 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.411742 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.420479 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume 
controller attach/detach" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.420529 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"8ef29025877e988edc36110ef9e9ec8fa1fb612a9392b9f7b9cfa6b34d9f1dc2"} Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.430272 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.430305 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.430316 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.440667 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15"} Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.440809 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.441993 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.442019 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.442030 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.502467 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.514205 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.528571 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.531084 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.531113 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.531121 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:58 crc kubenswrapper[4693]: I1008 07:16:58.531141 4693 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 08 07:16:58 crc kubenswrapper[4693]: E1008 07:16:58.531501 4693 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.166:6443: connect: connection refused" node="crc" Oct 08 07:16:58 crc kubenswrapper[4693]: W1008 07:16:58.801556 4693 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get 
"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.166:6443: connect: connection refused Oct 08 07:16:58 crc kubenswrapper[4693]: E1008 07:16:58.801638 4693 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.166:6443: connect: connection refused" logger="UnhandledError" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.450700 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2"} Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.450850 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.451888 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.451929 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.451946 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.453394 4693 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3c748357703b272479c822c1f11170a7f63a1ed839f071c7da86dd43a6ddd17a" exitCode=0 Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.453505 4693 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.453526 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.453552 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.453999 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3c748357703b272479c822c1f11170a7f63a1ed839f071c7da86dd43a6ddd17a"} Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.454131 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.454151 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.454220 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.454844 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.454883 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.454857 4693 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.454900 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.454923 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.455015 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.455752 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.455794 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.455835 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.455834 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.455863 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.455874 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:16:59 crc kubenswrapper[4693]: I1008 07:16:59.701766 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.089487 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.201976 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.462615 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fb31a751d7102ba6703b23c01d0c2630753d6fc4cf2d94defc2471bdfa750855"} Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.462697 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f486687a2d16ce5b63419b573a1181e179b5c189f2e2630f41ef3a8d4c4d1cf7"} Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.462727 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"37ff83c660a42140f00ba5d17d1ff80ba0b2736e2a896b5216746f9b1cf90e52"} Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.462741 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.462874 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.464466 4693 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.464517 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.464540 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.464564 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.464612 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:00 crc kubenswrapper[4693]: I1008 07:17:00.464710 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.217766 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.472698 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f8d9f43b7837306ac173f02f7e36958595197c1322bf8e1ebfe72e9d9affa228"} Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.472779 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.472878 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.472779 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7e6f102b9ca6e31e0bf73e1977d3a32ac33e8897bce0006f4688aaa6131042ca"} Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.472975 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.474484 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.474532 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.474556 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.474580 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.474611 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.474651 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.474670 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.474627 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.474745 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.732031 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.733722 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.733770 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.733789 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.733855 4693 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 08 07:17:01 crc kubenswrapper[4693]: I1008 07:17:01.847144 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.475687 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.475707 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.477151 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.477210 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.477218 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.477264 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.477281 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.477230 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.482529 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.482708 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.484009 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.484083 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:02 crc kubenswrapper[4693]: I1008 07:17:02.484105 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:03 crc kubenswrapper[4693]: I1008 07:17:03.477728 4693 kubelet_node_status.go:401] "Setting node 
annotation to enable volume controller attach/detach" Oct 08 07:17:03 crc kubenswrapper[4693]: I1008 07:17:03.478725 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:03 crc kubenswrapper[4693]: I1008 07:17:03.478786 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:03 crc kubenswrapper[4693]: I1008 07:17:03.478809 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:03 crc kubenswrapper[4693]: I1008 07:17:03.666219 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 08 07:17:03 crc kubenswrapper[4693]: I1008 07:17:03.666456 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:03 crc kubenswrapper[4693]: I1008 07:17:03.667875 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:03 crc kubenswrapper[4693]: I1008 07:17:03.667904 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:03 crc kubenswrapper[4693]: I1008 07:17:03.667914 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:04 crc kubenswrapper[4693]: I1008 07:17:04.847565 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 08 07:17:04 crc kubenswrapper[4693]: I1008 07:17:04.847867 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:04 crc kubenswrapper[4693]: I1008 07:17:04.849218 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:04 crc kubenswrapper[4693]: I1008 07:17:04.849244 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:04 crc kubenswrapper[4693]: I1008 07:17:04.849252 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:05 crc kubenswrapper[4693]: E1008 07:17:05.424096 4693 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 08 07:17:05 crc kubenswrapper[4693]: I1008 07:17:05.482619 4693 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 08 07:17:05 crc kubenswrapper[4693]: I1008 07:17:05.482727 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 08 07:17:08 crc kubenswrapper[4693]: I1008 07:17:08.074118 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:17:08 crc kubenswrapper[4693]: I1008 07:17:08.074309 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:08 crc kubenswrapper[4693]: I1008 07:17:08.075764 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:08 crc kubenswrapper[4693]: I1008 07:17:08.075858 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:08 crc kubenswrapper[4693]: I1008 07:17:08.075885 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:09 crc kubenswrapper[4693]: W1008 07:17:09.193884 4693 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 08 07:17:09 crc kubenswrapper[4693]: I1008 07:17:09.194011 4693 trace.go:236] Trace[1709759195]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Oct-2025 07:16:59.192) (total time: 10001ms): Oct 08 07:17:09 crc kubenswrapper[4693]: Trace[1709759195]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (07:17:09.193) Oct 08 07:17:09 crc kubenswrapper[4693]: Trace[1709759195]: [10.001786257s] [10.001786257s] END Oct 08 07:17:09 crc kubenswrapper[4693]: E1008 07:17:09.194043 4693 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 08 07:17:09 crc kubenswrapper[4693]: I1008 07:17:09.279473 4693 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 08 07:17:09 crc kubenswrapper[4693]: W1008 07:17:09.290925 4693 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 08 07:17:09 crc kubenswrapper[4693]: I1008 07:17:09.291008 4693 trace.go:236] Trace[1059283544]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Oct-2025 07:16:59.290) (total time: 10000ms): Oct 08 07:17:09 crc kubenswrapper[4693]: Trace[1059283544]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (07:17:09.290) Oct 08 07:17:09 crc kubenswrapper[4693]: Trace[1059283544]: [10.000833212s] [10.000833212s] END Oct 08 07:17:09 crc kubenswrapper[4693]: E1008 07:17:09.291030 4693 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 08 07:17:09 crc kubenswrapper[4693]: 
I1008 07:17:09.585101 4693 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 08 07:17:09 crc kubenswrapper[4693]: I1008 07:17:09.585181 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 08 07:17:09 crc kubenswrapper[4693]: I1008 07:17:09.589502 4693 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 08 07:17:09 crc kubenswrapper[4693]: I1008 07:17:09.589583 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.224971 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.225227 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.226755 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.226837 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.226859 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.231739 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.499962 4693 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.500077 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.501278 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.501339 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.501351 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.885563 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-etcd/etcd-crc" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.885884 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.887881 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.887939 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.887958 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:11 crc kubenswrapper[4693]: I1008 07:17:11.903367 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 08 07:17:12 crc kubenswrapper[4693]: I1008 07:17:12.502852 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:12 crc kubenswrapper[4693]: I1008 07:17:12.504289 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:12 crc kubenswrapper[4693]: I1008 07:17:12.504355 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:12 crc kubenswrapper[4693]: I1008 07:17:12.504370 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:12 crc kubenswrapper[4693]: I1008 07:17:12.843957 4693 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 08 07:17:14 crc kubenswrapper[4693]: E1008 07:17:14.578415 4693 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.581901 4693 trace.go:236] Trace[784162398]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Oct-2025 07:17:04.215) (total time: 10366ms): Oct 08 07:17:14 crc kubenswrapper[4693]: Trace[784162398]: ---"Objects listed" error: 10366ms (07:17:14.581) Oct 08 07:17:14 crc kubenswrapper[4693]: Trace[784162398]: [10.366593837s] [10.366593837s] END Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.581938 4693 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.586056 4693 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 08 07:17:14 crc kubenswrapper[4693]: E1008 07:17:14.586736 4693 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.587551 4693 trace.go:236] Trace[976668283]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Oct-2025 07:17:04.148) (total time: 10439ms): Oct 08 07:17:14 crc kubenswrapper[4693]: Trace[976668283]: ---"Objects listed" error: 10438ms (07:17:14.587) Oct 08 07:17:14 crc kubenswrapper[4693]: Trace[976668283]: [10.439085307s] [10.439085307s] END Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.587585 4693 reflector.go:368] Caches populated for 
*v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.643293 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.643497 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.644948 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.645009 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.645024 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.685173 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.685469 4693 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.687397 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.687444 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.687462 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:14 crc kubenswrapper[4693]: I1008 07:17:14.698938 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.226152 4693 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.277123 4693 apiserver.go:52] "Watching apiserver" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.280944 4693 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.281404 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"] Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.281972 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.281984 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.282014 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.282138 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.282207 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.282567 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.282615 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.282663 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.282883 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.288467 4693 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.290729 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.290779 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.290827 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.290860 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.290889 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.290954 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291074 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291105 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291132 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 
07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291156 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291180 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291204 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291230 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291257 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291279 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291303 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291329 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.291373 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:17:15.791340059 +0000 UTC m=+21.162305004 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291431 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291425 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291469 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291505 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291531 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291525 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291537 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291553 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291625 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291635 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291674 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291706 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291738 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291774 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291806 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291844 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291867 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291878 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291900 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291927 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291931 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.291977 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292005 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292031 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292057 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292079 4693 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292103 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292126 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292133 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292147 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292177 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292200 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292223 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292247 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292269 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292294 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292317 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292341 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292364 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292389 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292414 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292433 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292452 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292475 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292498 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: 
\"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292522 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292547 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292571 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292596 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292621 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292685 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292708 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292733 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292771 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292796 4693 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292837 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292863 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292889 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292912 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292947 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293189 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293211 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293234 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293255 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293274 4693 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293296 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293316 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293340 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293362 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293389 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293413 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293437 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293459 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293482 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 
07:17:15.293504 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293522 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293541 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293559 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293581 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293611 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293630 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293653 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293673 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293700 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 08 07:17:15 crc 
kubenswrapper[4693]: I1008 07:17:15.294079 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294113 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294137 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294157 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294180 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294203 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294226 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294251 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294295 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294320 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294344 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294367 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294392 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294417 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294440 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294463 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294486 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294511 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294534 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294558 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" 
(UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294582 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294604 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294631 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294655 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294680 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294704 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294728 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294752 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294777 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294800 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: 
\"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294845 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294869 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294892 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294919 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294945 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294969 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294990 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295009 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295032 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295058 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 08 
07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295081 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295104 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295129 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295151 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295169 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295186 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295203 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295220 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295237 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295254 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295272 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295293 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295309 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295326 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295341 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295357 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295375 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295397 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295420 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295441 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295466 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295491 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295514 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295535 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295558 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295580 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295605 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295627 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295649 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295671 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295694 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295725 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295752 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295777 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295801 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295845 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295870 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295893 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295916 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295941 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295963 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295989 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296014 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296041 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296067 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296093 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296116 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296142 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296168 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296192 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296217 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296241 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296265 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296291 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296316 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296341 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296367 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296389 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296414 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296438 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296464 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296488 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296543 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296580 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296616 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296644 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296674 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296700 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 08 07:17:15 crc 
kubenswrapper[4693]: I1008 07:17:15.296727 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296756 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296788 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296866 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296902 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296930 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296955 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296982 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297041 4693 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297057 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297073 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297093 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297130 4693 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297144 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297158 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297171 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.332456 4693 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.338846 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.340415 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.341656 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292134 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292155 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292183 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292180 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292280 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292433 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292474 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292541 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292560 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292662 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292748 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292798 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.292902 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293342 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293387 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293395 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293428 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293626 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293656 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.293729 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.294018 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295198 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.295703 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296029 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.296527 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297138 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297464 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297713 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297926 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.297989 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.298264 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.298318 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.298495 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.298776 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.298878 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.298966 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.343033 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.299083 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.299333 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.299313 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.299363 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.299639 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.299745 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.299877 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.300104 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.300413 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.300603 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.300633 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.343426 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.343442 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.300697 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.300847 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.343793 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.343794 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.300856 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.300899 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.300912 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.343929 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.300962 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.344040 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.300999 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.344155 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.301080 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.301066 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.301086 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.301145 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.301201 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.301318 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.301367 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.301396 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.302167 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.303054 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.303336 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.303424 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.304399 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.304765 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.305077 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.305322 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.305559 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.305903 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.344780 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.306437 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.306829 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.309721 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.309919 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.310065 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.310457 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.310624 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.310932 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.310979 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.311325 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.311593 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.311964 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.328163 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.328650 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.329000 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.329275 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.329565 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.329626 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.329643 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.330072 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.330437 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.330768 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.330926 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.331105 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.331622 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.331637 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). 
InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.332039 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.332046 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.332064 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.332064 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.332301 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.332300 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.332356 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.332494 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.332529 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.332695 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.332986 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.333030 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.333096 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.346057 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.333173 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.333399 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.333330 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.333789 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.333805 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.333963 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.334203 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.334628 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.334935 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.335314 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.335718 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.336329 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.336579 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.336640 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.336901 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.337364 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.337998 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.338295 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.338567 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.338720 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.339044 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.339061 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.339528 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.339863 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.346416 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.339902 4693 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.346441 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.346513 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:15.846487441 +0000 UTC m=+21.217452426 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.339977 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.340028 4693 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.346598 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:15.846588744 +0000 UTC m=+21.217553899 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.346697 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.346748 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.352670 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.340387 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.340562 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.341201 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.342871 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.342999 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.343048 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.344119 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.340274 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.356097 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.356316 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.356649 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.356981 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.357147 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.357509 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.357634 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.358196 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.358229 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.358562 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.358601 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.358627 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.358868 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.359124 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.359133 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.359264 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.359363 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.359427 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.359734 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.359784 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.359792 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.359422 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.359934 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.360048 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.360053 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.360678 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.360980 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.361266 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.361365 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.361588 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.361687 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.362638 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.362654 4693 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.362710 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.362607 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.361690 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.362914 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.362948 4693 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.363022 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:15.863002612 +0000 UTC m=+21.233967547 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.363051 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:15.863038843 +0000 UTC m=+21.234003778 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.363524 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.364100 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.364731 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.367072 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.367251 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.371903 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.372327 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.372461 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.372732 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.374280 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.375106 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.376380 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.378898 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.380698 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.381738 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.381801 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.383676 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.385050 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.385563 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.386386 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.386503 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.386898 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.392318 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.392496 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.393113 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.393632 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.395086 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.395603 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398072 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398124 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398267 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398283 4693 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398295 4693 reconciler_common.go:293] 
"Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398307 4693 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398319 4693 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398331 4693 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398342 4693 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398354 4693 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398368 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398379 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398390 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398406 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398417 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398429 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398440 4693 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398451 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398463 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398475 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398487 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398499 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398512 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398524 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398535 4693 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398547 4693 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398558 4693 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398570 4693 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398581 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398592 4693 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398603 4693 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398615 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398630 4693 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398642 4693 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398655 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398667 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398679 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398692 4693 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398704 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398716 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398728 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398743 4693 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398755 4693 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398767 4693 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398778 4693 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398790 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398801 4693 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398831 4693 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398843 4693 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398855 4693 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398867 4693 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398879 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398890 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398902 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398914 4693 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398926 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398939 4693 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398951 4693 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398963 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398974 4693 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398986 4693 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.398997 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399008 4693 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399021 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399035 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399049 4693 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399061 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399076 4693 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399087 4693 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399099 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399113 4693 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399124 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399135 4693 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399147 4693 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399158 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399169 4693 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399181 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399193 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399204 4693 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399215 4693 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399231 4693 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399264 4693 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399275 4693 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399276 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399287 4693 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399299 4693 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399310 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399322 4693 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399334 4693 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399346 4693 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399358 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399371 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399382 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399394 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399406 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399419 4693 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399432 4693 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399444 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399456 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399468 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399479 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399490 4693 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399503 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399515 4693 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399526 4693 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399646 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.399662 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.400321 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 08 07:17:15 crc 
kubenswrapper[4693]: I1008 07:17:15.400967 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401133 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401290 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401476 4693 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401491 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401503 4693 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401515 4693 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401526 4693 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401539 4693 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401559 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401573 4693 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401585 4693 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401597 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: 
\"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401610 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401621 4693 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401633 4693 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401646 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401659 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401671 4693 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401682 4693 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401693 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401704 4693 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401716 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401727 4693 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401739 4693 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401751 4693 reconciler_common.go:293] "Volume 
detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401762 4693 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401773 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401786 4693 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401796 4693 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401828 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401858 4693 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401891 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401903 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401915 4693 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401926 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401937 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401950 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401961 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: 
\"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401973 4693 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.401985 4693 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402004 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402016 4693 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402027 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402039 4693 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402052 4693 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402063 4693 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402075 4693 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402091 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402102 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402114 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402127 4693 reconciler_common.go:293] "Volume detached for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402138 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402149 4693 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402160 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402171 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402184 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402194 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402205 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402216 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402227 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402239 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402250 4693 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402262 4693 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402273 4693 reconciler_common.go:293] "Volume 
detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402284 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402295 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402306 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402317 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402328 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.402695 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403069 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403208 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403362 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403413 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403429 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403443 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403459 4693 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403684 4693 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403697 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403711 4693 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403724 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403910 4693 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403922 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403934 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403946 4693 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.403964 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.404656 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.405099 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.405223 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.413041 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.413737 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.414603 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.418088 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.418662 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.419991 4693 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.420587 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.422105 4693 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.422237 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.425119 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.425774 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.427606 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.428132 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.433539 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.435079 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.436176 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.437756 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.438737 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.440232 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.441307 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.444282 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.448998 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.449222 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.450045 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.451389 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.452454 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.453423 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.453964 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.454998 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.455506 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.456623 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.457300 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.457809 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 08 07:17:15 crc 
kubenswrapper[4693]: I1008 07:17:15.460367 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.471704 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.488601 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.502541 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.511873 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.513498 4693 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2" exitCode=255 Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.514027 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2"} Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.528180 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.528349 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-5hs96"] Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.528649 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-2tpvg"] Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.528848 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-5hs96" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.528868 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-2tpvg" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.532958 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.532992 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.533130 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.533255 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.533451 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.533487 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.533537 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.548238 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.561379 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.573830 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.600986 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.602312 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.605196 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm9qp\" (UniqueName: \"kubernetes.io/projected/1cb57d74-a70a-40bf-98e4-3e075a97b049-kube-api-access-sm9qp\") pod \"node-resolver-2tpvg\" (UID: \"1cb57d74-a70a-40bf-98e4-3e075a97b049\") " pod="openshift-dns/node-resolver-2tpvg" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.605299 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2ac0c2cb-0a21-403d-82d2-a484eec44d7b-host\") pod \"node-ca-5hs96\" (UID: \"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\") " pod="openshift-image-registry/node-ca-5hs96" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.605328 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s985f\" (UniqueName: \"kubernetes.io/projected/2ac0c2cb-0a21-403d-82d2-a484eec44d7b-kube-api-access-s985f\") pod \"node-ca-5hs96\" (UID: \"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\") " pod="openshift-image-registry/node-ca-5hs96" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.605360 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1cb57d74-a70a-40bf-98e4-3e075a97b049-hosts-file\") pod \"node-resolver-2tpvg\" (UID: \"1cb57d74-a70a-40bf-98e4-3e075a97b049\") " pod="openshift-dns/node-resolver-2tpvg" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.605387 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2ac0c2cb-0a21-403d-82d2-a484eec44d7b-serviceca\") pod \"node-ca-5hs96\" (UID: \"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\") " pod="openshift-image-registry/node-ca-5hs96" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.606236 4693 scope.go:117] "RemoveContainer" containerID="5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.607251 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.615855 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.630526 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.641994 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.665079 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.668745 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.688795 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.700719 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.706655 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm9qp\" (UniqueName: \"kubernetes.io/projected/1cb57d74-a70a-40bf-98e4-3e075a97b049-kube-api-access-sm9qp\") pod \"node-resolver-2tpvg\" (UID: \"1cb57d74-a70a-40bf-98e4-3e075a97b049\") " pod="openshift-dns/node-resolver-2tpvg" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.706714 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2ac0c2cb-0a21-403d-82d2-a484eec44d7b-host\") pod \"node-ca-5hs96\" (UID: \"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\") " pod="openshift-image-registry/node-ca-5hs96" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.706736 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s985f\" (UniqueName: \"kubernetes.io/projected/2ac0c2cb-0a21-403d-82d2-a484eec44d7b-kube-api-access-s985f\") pod \"node-ca-5hs96\" (UID: \"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\") " pod="openshift-image-registry/node-ca-5hs96" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.706754 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1cb57d74-a70a-40bf-98e4-3e075a97b049-hosts-file\") pod \"node-resolver-2tpvg\" (UID: \"1cb57d74-a70a-40bf-98e4-3e075a97b049\") " pod="openshift-dns/node-resolver-2tpvg" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.706774 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2ac0c2cb-0a21-403d-82d2-a484eec44d7b-serviceca\") pod \"node-ca-5hs96\" (UID: \"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\") " pod="openshift-image-registry/node-ca-5hs96" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.707803 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2ac0c2cb-0a21-403d-82d2-a484eec44d7b-serviceca\") pod \"node-ca-5hs96\" (UID: \"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\") " pod="openshift-image-registry/node-ca-5hs96" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.707941 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2ac0c2cb-0a21-403d-82d2-a484eec44d7b-host\") pod \"node-ca-5hs96\" (UID: \"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\") " pod="openshift-image-registry/node-ca-5hs96" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.708124 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1cb57d74-a70a-40bf-98e4-3e075a97b049-hosts-file\") pod \"node-resolver-2tpvg\" (UID: \"1cb57d74-a70a-40bf-98e4-3e075a97b049\") " pod="openshift-dns/node-resolver-2tpvg" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.714157 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: W1008 07:17:15.722127 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-f5b015719238fdaa263a00952d67e8248c9ba74a11eddffa7ed86eaeacf15322 WatchSource:0}: Error finding container f5b015719238fdaa263a00952d67e8248c9ba74a11eddffa7ed86eaeacf15322: Status 404 returned error can't find the container with id f5b015719238fdaa263a00952d67e8248c9ba74a11eddffa7ed86eaeacf15322 Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.727472 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm9qp\" (UniqueName: \"kubernetes.io/projected/1cb57d74-a70a-40bf-98e4-3e075a97b049-kube-api-access-sm9qp\") pod \"node-resolver-2tpvg\" (UID: \"1cb57d74-a70a-40bf-98e4-3e075a97b049\") " pod="openshift-dns/node-resolver-2tpvg" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.730005 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s985f\" (UniqueName: \"kubernetes.io/projected/2ac0c2cb-0a21-403d-82d2-a484eec44d7b-kube-api-access-s985f\") pod 
\"node-ca-5hs96\" (UID: \"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\") " pod="openshift-image-registry/node-ca-5hs96" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.734619 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.741430 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.807495 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.807650 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:17:16.807631501 +0000 UTC m=+22.178596436 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.841657 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-5hs96" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.847672 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-2tpvg" Oct 08 07:17:15 crc kubenswrapper[4693]: W1008 07:17:15.872109 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1cb57d74_a70a_40bf_98e4_3e075a97b049.slice/crio-6c782e728f21ff3b8526b22bb8fa03776ec659c2bbbd0d67ab8e8438839ce0a8 WatchSource:0}: Error finding container 6c782e728f21ff3b8526b22bb8fa03776ec659c2bbbd0d67ab8e8438839ce0a8: Status 404 returned error can't find the container with id 6c782e728f21ff3b8526b22bb8fa03776ec659c2bbbd0d67ab8e8438839ce0a8 Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.922352 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.922395 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.922416 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:15 crc kubenswrapper[4693]: I1008 07:17:15.922438 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.922560 4693 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.922611 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:16.92259542 +0000 UTC m=+22.293560355 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.922910 4693 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.922940 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:16.922932259 +0000 UTC m=+22.293897194 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.922991 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.923003 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.923014 4693 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.923036 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:16.923030642 +0000 UTC m=+22.293995577 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.923075 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.923083 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.923089 4693 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:15 crc kubenswrapper[4693]: E1008 07:17:15.923108 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:16.923102554 +0000 UTC m=+22.294067489 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.006200 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-f7fh5"] Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.006801 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.008790 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.008970 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.009218 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.009498 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.010010 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-snt7l"] Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.010612 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.011554 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-bfhs8"] Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.011737 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.011761 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.014199 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.014323 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.014372 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.014493 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.014568 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.014584 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.018850 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.019632 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.019762 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.026217 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08
T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.045586 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.057261 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.067672 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.078116 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.089764 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.104520 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.120233 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129526 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-os-release\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129584 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovnkube-script-lib\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129620 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-slash\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129638 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-system-cni-dir\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129653 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-multus-conf-dir\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129667 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-run-netns\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129685 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-multus-cni-dir\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129699 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-var-lib-cni-bin\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129791 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-openvswitch\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129849 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-etc-kubernetes\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129868 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-node-log\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129888 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-run-ovn-kubernetes\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129907 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-cni-netd\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129921 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8ddc214e-6569-4b0e-8783-f484a001ce6a-cni-binary-copy\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129959 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvtx9\" (UniqueName: \"kubernetes.io/projected/8ddc214e-6569-4b0e-8783-f484a001ce6a-kube-api-access-bvtx9\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129976 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" 
(UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-var-lib-openvswitch\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.129991 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-env-overrides\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130027 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qv25\" (UniqueName: \"kubernetes.io/projected/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-kube-api-access-4qv25\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130043 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-os-release\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130098 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-kubelet\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130182 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqs6f\" (UniqueName: \"kubernetes.io/projected/379c61a3-51ff-4bdf-ab8b-5af8bf090716-kube-api-access-lqs6f\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130201 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-cni-binary-copy\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130218 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130234 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-system-cni-dir\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 
07:17:16.130273 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-cni-bin\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130310 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-log-socket\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130326 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-run-multus-certs\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130366 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-systemd-units\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130387 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-run-k8s-cni-cncf-io\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130403 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-var-lib-kubelet\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130419 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130454 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovnkube-config\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130480 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-cnibin\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " 
pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130515 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8ddc214e-6569-4b0e-8783-f484a001ce6a-multus-daemon-config\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130542 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-systemd\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130586 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-etc-openvswitch\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130601 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-ovn\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130616 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovn-node-metrics-cert\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130661 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-var-lib-cni-multus\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130680 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-hostroot\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130695 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130740 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-cnibin\") pod 
\"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130766 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-multus-socket-dir-parent\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.130837 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-run-netns\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.141296 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.152923 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.164670 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.173150 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.182764 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.192221 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.199795 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.205924 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.217433 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231659 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-slash\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231704 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-system-cni-dir\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231720 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-multus-conf-dir\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231739 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-run-netns\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231761 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-multus-cni-dir\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231778 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-var-lib-cni-bin\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc 
kubenswrapper[4693]: I1008 07:17:16.231792 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-openvswitch\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231821 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-etc-kubernetes\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231837 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-node-log\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231852 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-run-ovn-kubernetes\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231845 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-system-cni-dir\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231902 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-cni-netd\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231866 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-cni-netd\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231946 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-slash\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231963 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-env-overrides\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231985 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-var-lib-cni-bin\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.231992 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qv25\" (UniqueName: \"kubernetes.io/projected/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-kube-api-access-4qv25\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232017 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-os-release\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232025 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-multus-conf-dir\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232034 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8ddc214e-6569-4b0e-8783-f484a001ce6a-cni-binary-copy\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232052 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvtx9\" (UniqueName: \"kubernetes.io/projected/8ddc214e-6569-4b0e-8783-f484a001ce6a-kube-api-access-bvtx9\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232069 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-var-lib-openvswitch\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232085 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-kubelet\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232295 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqs6f\" (UniqueName: \"kubernetes.io/projected/379c61a3-51ff-4bdf-ab8b-5af8bf090716-kube-api-access-lqs6f\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232310 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-cni-binary-copy\") pod 
\"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232329 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232344 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-system-cni-dir\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232368 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-log-socket\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232385 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-cni-bin\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232400 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-multus-cni-dir\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232401 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-run-k8s-cni-cncf-io\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232423 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-run-k8s-cni-cncf-io\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232440 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-var-lib-kubelet\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232461 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-run-multus-certs\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 
08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232473 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-openvswitch\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232483 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-systemd-units\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232497 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-etc-kubernetes\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232508 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232519 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-node-log\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232545 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovnkube-config\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232562 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8ddc214e-6569-4b0e-8783-f484a001ce6a-multus-daemon-config\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232600 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-cnibin\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232615 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-systemd\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232632 4693 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-etc-openvswitch\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232649 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-ovn\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232665 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovn-node-metrics-cert\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232686 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-cnibin\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232704 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-multus-socket-dir-parent\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232719 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-run-netns\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232738 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-var-lib-cni-multus\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232753 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-hostroot\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232770 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232789 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-os-release\") pod 
\"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232806 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovnkube-script-lib\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.233599 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-env-overrides\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.233665 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-etc-openvswitch\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.233686 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-os-release\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.233695 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-var-lib-kubelet\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.233712 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-run-multus-certs\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.233729 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-systemd-units\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232054 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-run-netns\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234020 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovnkube-script-lib\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: 
I1008 07:17:16.234028 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-var-lib-openvswitch\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234059 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-kubelet\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234060 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-ovn\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234100 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-system-cni-dir\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234208 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8ddc214e-6569-4b0e-8783-f484a001ce6a-cni-binary-copy\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234253 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-log-socket\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234266 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234278 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-cni-bin\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.232546 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-run-ovn-kubernetes\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234579 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-cni-binary-copy\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234623 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-cnibin\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234647 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-systemd\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234668 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-var-lib-cni-multus\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234695 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-cnibin\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234696 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovnkube-config\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234723 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234733 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8ddc214e-6569-4b0e-8783-f484a001ce6a-multus-daemon-config\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234750 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-host-run-netns\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234770 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-os-release\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: 
\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234778 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-multus-socket-dir-parent\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.234772 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8ddc214e-6569-4b0e-8783-f484a001ce6a-hostroot\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.237304 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.237500 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08
T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.238373 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovn-node-metrics-cert\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.252518 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqs6f\" (UniqueName: \"kubernetes.io/projected/379c61a3-51ff-4bdf-ab8b-5af8bf090716-kube-api-access-lqs6f\") pod \"ovnkube-node-snt7l\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.254578 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qv25\" (UniqueName: \"kubernetes.io/projected/0e017f3d-4e13-489e-9e9d-b5e6ec1d626a-kube-api-access-4qv25\") pod \"multus-additional-cni-plugins-f7fh5\" (UID: \"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\") " 
pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.258514 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.265393 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvtx9\" (UniqueName: \"kubernetes.io/projected/8ddc214e-6569-4b0e-8783-f484a001ce6a-kube-api-access-bvtx9\") pod \"multus-bfhs8\" (UID: \"8ddc214e-6569-4b0e-8783-f484a001ce6a\") " pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.279934 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.314716 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.329216 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.335461 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.345277 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:16 crc kubenswrapper[4693]: W1008 07:17:16.360605 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod379c61a3_51ff_4bdf_ab8b_5af8bf090716.slice/crio-9e1f8fff952647c2eae83b9c29e2beb44f42fbc10a303450e6cf5c2410b3cc40 WatchSource:0}: Error finding container 9e1f8fff952647c2eae83b9c29e2beb44f42fbc10a303450e6cf5c2410b3cc40: Status 404 returned error can't find the container with id 9e1f8fff952647c2eae83b9c29e2beb44f42fbc10a303450e6cf5c2410b3cc40 Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.363270 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-bfhs8" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.366997 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\"
:false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID
\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: W1008 07:17:16.402160 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ddc214e_6569_4b0e_8783_f484a001ce6a.slice/crio-6c2dbd123bde4933292988b1cdec2a7eea7fe028f564cb9ac1caa0b4ff553343 WatchSource:0}: Error finding container 6c2dbd123bde4933292988b1cdec2a7eea7fe028f564cb9ac1caa0b4ff553343: Status 404 returned error can't find the container with id 6c2dbd123bde4933292988b1cdec2a7eea7fe028f564cb9ac1caa0b4ff553343 Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.417414 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-xwrvr"] Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.417855 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.419703 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.419962 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.420086 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.420183 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.420559 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.428558 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.444232 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.459681 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.476904 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.485283 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.497609 4693 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.509742 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.519605 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.524734 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.524771 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.524784 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7af4f6a7b9f9205d57e25f068cc767c4ad789609362554f02863c44cca40a69b"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.525523 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" event={"ID":"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a","Type":"ContainerStarted","Data":"ff472b853c36fa49e9552aaf52df1f6ceebb2ee2e6c6741cbdb97cb4baa68aca"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.526402 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-5hs96" event={"ID":"2ac0c2cb-0a21-403d-82d2-a484eec44d7b","Type":"ContainerStarted","Data":"61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.526426 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-5hs96" event={"ID":"2ac0c2cb-0a21-403d-82d2-a484eec44d7b","Type":"ContainerStarted","Data":"fb877b76cdbe9ae2e0ed99e7e11f55b9341397d347fe4108861384178186ac63"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.528511 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 08 07:17:16 
crc kubenswrapper[4693]: I1008 07:17:16.531104 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.531739 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.531983 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.535988 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/adebc2b6-0bd4-4c1c-8b8f-68a98012f490-mcd-auth-proxy-config\") pod \"machine-config-daemon-xwrvr\" (UID: \"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\") " pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.536060 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/adebc2b6-0bd4-4c1c-8b8f-68a98012f490-proxy-tls\") pod \"machine-config-daemon-xwrvr\" (UID: \"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\") " pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.536089 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx99l\" (UniqueName: \"kubernetes.io/projected/adebc2b6-0bd4-4c1c-8b8f-68a98012f490-kube-api-access-cx99l\") pod \"machine-config-daemon-xwrvr\" (UID: \"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\") " pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.536165 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/adebc2b6-0bd4-4c1c-8b8f-68a98012f490-rootfs\") pod \"machine-config-daemon-xwrvr\" (UID: \"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\") " pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.541949 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-2tpvg" event={"ID":"1cb57d74-a70a-40bf-98e4-3e075a97b049","Type":"ContainerStarted","Data":"828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.541990 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-2tpvg" event={"ID":"1cb57d74-a70a-40bf-98e4-3e075a97b049","Type":"ContainerStarted","Data":"6c782e728f21ff3b8526b22bb8fa03776ec659c2bbbd0d67ab8e8438839ce0a8"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.546553 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.546608 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"e15136b9e478961825c3e98366df8103f70211acaee336651dc9211558b64f61"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.547588 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bfhs8" event={"ID":"8ddc214e-6569-4b0e-8783-f484a001ce6a","Type":"ContainerStarted","Data":"0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.547657 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bfhs8" event={"ID":"8ddc214e-6569-4b0e-8783-f484a001ce6a","Type":"ContainerStarted","Data":"6c2dbd123bde4933292988b1cdec2a7eea7fe028f564cb9ac1caa0b4ff553343"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.547734 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"r
unning\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.548989 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c" exitCode=0 Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.549039 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.549060 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"9e1f8fff952647c2eae83b9c29e2beb44f42fbc10a303450e6cf5c2410b3cc40"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.550455 4693 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f5b015719238fdaa263a00952d67e8248c9ba74a11eddffa7ed86eaeacf15322"} Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.561272 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/o
penshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from 
k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.578338 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:16Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.609116 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:16Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.636707 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/adebc2b6-0bd4-4c1c-8b8f-68a98012f490-proxy-tls\") pod \"machine-config-daemon-xwrvr\" (UID: \"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\") " pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.636766 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx99l\" (UniqueName: \"kubernetes.io/projected/adebc2b6-0bd4-4c1c-8b8f-68a98012f490-kube-api-access-cx99l\") pod \"machine-config-daemon-xwrvr\" (UID: \"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\") " 
pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.636986 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/adebc2b6-0bd4-4c1c-8b8f-68a98012f490-rootfs\") pod \"machine-config-daemon-xwrvr\" (UID: \"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\") " pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.637038 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/adebc2b6-0bd4-4c1c-8b8f-68a98012f490-mcd-auth-proxy-config\") pod \"machine-config-daemon-xwrvr\" (UID: \"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\") " pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.637173 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/adebc2b6-0bd4-4c1c-8b8f-68a98012f490-rootfs\") pod \"machine-config-daemon-xwrvr\" (UID: \"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\") " pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.638854 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/adebc2b6-0bd4-4c1c-8b8f-68a98012f490-mcd-auth-proxy-config\") pod \"machine-config-daemon-xwrvr\" (UID: \"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\") " pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.642034 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/adebc2b6-0bd4-4c1c-8b8f-68a98012f490-proxy-tls\") pod \"machine-config-daemon-xwrvr\" (UID: \"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\") " pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.650122 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:16Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.673073 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx99l\" (UniqueName: \"kubernetes.io/projected/adebc2b6-0bd4-4c1c-8b8f-68a98012f490-kube-api-access-cx99l\") pod \"machine-config-daemon-xwrvr\" (UID: \"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\") " pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.710235 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:16Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.746410 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.753304 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"r
eady\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cr
i-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:16Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:16 crc kubenswrapper[4693]: W1008 07:17:16.757979 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podadebc2b6_0bd4_4c1c_8b8f_68a98012f490.slice/crio-5ec53e600eeb512050c3077024b35c6edce4cd8e52b205a4515c50e086f349e3 WatchSource:0}: Error finding container 5ec53e600eeb512050c3077024b35c6edce4cd8e52b205a4515c50e086f349e3: Status 404 returned error can't find the container with id 5ec53e600eeb512050c3077024b35c6edce4cd8e52b205a4515c50e086f349e3 Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.795096 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:16Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.831257 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:16Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.838323 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 
07:17:16.838492 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:17:18.83847613 +0000 UTC m=+24.209441065 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.867557 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:16Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.908471 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:16Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.939569 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.939608 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.939640 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.939664 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 07:17:16.939767 4693 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 07:17:16.939837 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:18.939820325 +0000 UTC m=+24.310785260 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 07:17:16.940159 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 07:17:16.940195 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 07:17:16.940192 4693 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 07:17:16.940213 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 07:17:16.940259 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 07:17:16.940277 4693 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 07:17:16.940211 4693 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 07:17:16.940309 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:18.940287457 +0000 UTC m=+24.311252392 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 07:17:16.940388 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:18.94036703 +0000 UTC m=+24.311331965 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:16 crc kubenswrapper[4693]: E1008 07:17:16.940401 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:18.9403959 +0000 UTC m=+24.311360835 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:16 crc kubenswrapper[4693]: I1008 07:17:16.946648 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:16Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.020504 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.037468 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.068435 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.107514 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.151138 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.195458 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.231170 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.268239 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.362277 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.362287 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:17 crc kubenswrapper[4693]: E1008 07:17:17.362454 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:17 crc kubenswrapper[4693]: E1008 07:17:17.362558 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.362287 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:17 crc kubenswrapper[4693]: E1008 07:17:17.362657 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.557655 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9"} Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.557720 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2"} Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.557735 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344"} Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.557746 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da"} Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.557756 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc"} Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.557767 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a"} Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.559061 4693 generic.go:334] "Generic (PLEG): container finished" podID="0e017f3d-4e13-489e-9e9d-b5e6ec1d626a" containerID="4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198" exitCode=0 Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.559131 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" event={"ID":"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a","Type":"ContainerDied","Data":"4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198"} Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.561109 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" 
event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf"} Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.561153 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75"} Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.561168 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"5ec53e600eeb512050c3077024b35c6edce4cd8e52b205a4515c50e086f349e3"} Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.573052 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.590764 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.609891 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.636069 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.653654 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.672570 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.683976 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.694917 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.706111 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.718889 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.734677 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.782120 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.803136 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.831217 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.866236 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.907377 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.946522 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:17 crc kubenswrapper[4693]: I1008 07:17:17.988234 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:17Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.025964 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.072456 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314
ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.115214 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.149779 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.189922 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.232095 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.268127 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.309248 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.356133 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.392128 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z 
is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.568063 4693 generic.go:334] "Generic (PLEG): container finished" podID="0e017f3d-4e13-489e-9e9d-b5e6ec1d626a" containerID="2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa" exitCode=0 Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.568181 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" event={"ID":"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a","Type":"ContainerDied","Data":"2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa"} Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.570927 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670"} Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.584398 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.606299 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.640294 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z 
is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.653621 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.667946 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.685778 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.701264 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.717336 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.752024 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.796587 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.833761 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.861467 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.861738 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:17:22.861688538 +0000 UTC m=+28.232653493 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.875889 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\
"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.910262 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.949618 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.962318 4693 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.962367 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.962392 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.962430 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.962558 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.962580 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.962595 4693 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.962643 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:22.962626513 +0000 UTC m=+28.333591458 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.962964 4693 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.963044 4693 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.963069 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.963131 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.963081 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:22.963052674 +0000 UTC m=+28.334017609 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.963148 4693 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.963182 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:22.963151227 +0000 UTC m=+28.334116172 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:18 crc kubenswrapper[4693]: E1008 07:17:18.963220 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-10-08 07:17:22.963193438 +0000 UTC m=+28.334158383 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:18 crc kubenswrapper[4693]: I1008 07:17:18.985897 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:18Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.036099 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.072228 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.105176 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.148284 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.191619 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.230242 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.270976 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.314955 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.349366 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.362624 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.362640 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.362778 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:19 crc kubenswrapper[4693]: E1008 07:17:19.362975 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:19 crc kubenswrapper[4693]: E1008 07:17:19.363189 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:19 crc kubenswrapper[4693]: E1008 07:17:19.363366 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.387542 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-
v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.426695 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc 
kubenswrapper[4693]: I1008 07:17:19.485315 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4q
v25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started
\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.518902 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z 
is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.575733 4693 generic.go:334] "Generic (PLEG): container finished" podID="0e017f3d-4e13-489e-9e9d-b5e6ec1d626a" containerID="3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2" exitCode=0 Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.575797 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" event={"ID":"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a","Type":"ContainerDied","Data":"3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2"} Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.579778 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9"} Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.591064 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.610415 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.633441 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z 
is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.670014 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.708022 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.749607 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.788096 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.832323 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.871457 4693 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8
790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.918184 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.963333 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:19 crc kubenswrapper[4693]: I1008 07:17:19.997533 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:19Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.042028 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.073458 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.588669 4693 generic.go:334] "Generic (PLEG): 
container finished" podID="0e017f3d-4e13-489e-9e9d-b5e6ec1d626a" containerID="c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a" exitCode=0 Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.588742 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" event={"ID":"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a","Type":"ContainerDied","Data":"c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a"} Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.608232 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.635858 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.677766 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb 
sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath
\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:
17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.694317 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.712749 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.732874 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.750749 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.771629 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.791547 4693 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8
790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.810459 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.825380 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.845610 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.860394 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.879275 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:20Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.987522 4693 kubelet_node_status.go:401] "Setting 
node annotation to enable volume controller attach/detach" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.989986 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.990047 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.990066 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:20 crc kubenswrapper[4693]: I1008 07:17:20.990171 4693 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.000556 4693 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.001040 4693 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.002580 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.002623 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.002641 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.002663 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.002683 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: E1008 07:17:21.018222 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.022852 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.022894 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.022909 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.022930 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.022948 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: E1008 07:17:21.043268 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.049593 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.049653 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.049670 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.049701 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.049722 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: E1008 07:17:21.066466 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.071247 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.071320 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
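[Editor's note] Every patch attempt in this burst fails identically: the kubelet cannot POST to the node.network-node-identity.openshift.io admission webhook on 127.0.0.1:9743 because that webhook's serving certificate expired on 2025-08-24T17:21:41Z, weeks before this boot. A minimal diagnostic sketch follows; it is an editor's addition, not kubelet code, and the endpoint address is taken from the log line above. InsecureSkipVerify lets the handshake complete despite the failed verification so the validity window can be read.

    // webhook_cert_probe.go -- editor's diagnostic sketch, not kubelet code.
    // Dials the webhook endpoint named in the log and prints the serving
    // certificate's validity window.
    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            fmt.Println("dial failed:", err)
            return
        }
        defer conn.Close()
        now := time.Now()
        for _, cert := range conn.ConnectionState().PeerCertificates {
            fmt.Printf("subject=%v notBefore=%s notAfter=%s expired=%v\n",
                cert.Subject,
                cert.NotBefore.Format(time.RFC3339),
                cert.NotAfter.Format(time.RFC3339),
                now.After(cert.NotAfter))
        }
    }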
event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.071339 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.071367 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.071386 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: E1008 07:17:21.090227 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.096634 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.096687 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
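[Editor's note] Independently of the webhook failure, every "Node became not ready" condition above carries the same root message: there is no CNI configuration file in /etc/kubernetes/cni/net.d/, so the runtime reports NetworkReady=false and the kubelet keeps the node NotReady. A simplified sketch of that readiness test follows; the real check lives in libcni/CRI-O, and this only mirrors the observable condition.

    // cni_conf_check.go -- editor's sketch approximating the network-readiness
    // test implied by the log message; not the actual libcni implementation.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
        entries, err := os.ReadDir(confDir)
        if err != nil {
            fmt.Println("cannot read CNI conf dir:", err)
            return
        }
        found := false
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json": // extensions libcni loads
                fmt.Println("found CNI config:", e.Name())
                found = true
            }
        }
        if !found {
            fmt.Println("no CNI configuration file in", confDir, "- NetworkReady stays false")
        }
    }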
event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.096705 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.096731 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.096751 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: E1008 07:17:21.117403 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: E1008 07:17:21.117656 4693 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.120286 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
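[Editor's note] Five consecutive patch failures precede the "Unable to update node status" entry above (the truncated one at the top of this excerpt, then 07:17:21.043268, .066466, .090227, and .117403), which is consistent with the upstream kubelet constant nodeStatusUpdateRetry = 5. A minimal sketch of that bounded-retry shape follows, assuming that constant; the helper below is a stand-in, not the real kubelet function.

    // node_status_retry.go -- editor's sketch of the give-up behavior logged
    // above; tryUpdateNodeStatus is a stand-in for the PATCH that the expired
    // webhook certificate keeps rejecting.
    package main

    import (
        "errors"
        "fmt"
    )

    const nodeStatusUpdateRetry = 5 // assumed upstream kubelet constant

    func tryUpdateNodeStatus(attempt int) error {
        // Stand-in: in this log every attempt fails the same way.
        return errors.New("failed calling webhook: x509: certificate has expired or is not yet valid")
    }

    func updateNodeStatus() error {
        for i := 0; i < nodeStatusUpdateRetry; i++ {
            if err := tryUpdateNodeStatus(i); err != nil {
                fmt.Println("Error updating node status, will retry:", err)
                continue
            }
            return nil
        }
        return errors.New("update node status exceeds retry count")
    }

    func main() {
        if err := updateNodeStatus(); err != nil {
            fmt.Println("Unable to update node status:", err)
        }
    }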
event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.120348 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.120370 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.120398 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.120420 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.223336 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.223400 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.223417 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.223444 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.223462 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.326767 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.326846 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.326866 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.326892 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.326912 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.362675 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:21 crc kubenswrapper[4693]: E1008 07:17:21.363232 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.363962 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:21 crc kubenswrapper[4693]: E1008 07:17:21.364072 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.364098 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:21 crc kubenswrapper[4693]: E1008 07:17:21.364148 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.429790 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.429854 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.429868 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.429889 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.429904 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.533943 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.534009 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.534027 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.534056 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.534085 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.600106 4693 generic.go:334] "Generic (PLEG): container finished" podID="0e017f3d-4e13-489e-9e9d-b5e6ec1d626a" containerID="3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea" exitCode=0 Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.600185 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" event={"ID":"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a","Type":"ContainerDied","Data":"3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea"} Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.608961 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"2cd5b6b75a586340ae47d2b2c9593698512bee2dd7381e83c45c7f06573f5955"} Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.609392 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.626380 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.637044 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.637102 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.637126 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.637161 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.637184 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.687274 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.687976 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.706909 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.726080 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.739944 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.739969 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.739983 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.740000 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.740011 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.757220 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f7
30b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.773341 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.792728 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.811495 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.826446 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.840224 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.843033 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.843105 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.843119 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.843139 4693 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.843196 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.860140 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"im
ageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.880214 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.894586 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.910832 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.929713 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.946324 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.946378 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.946391 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.946411 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.946429 4693 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:21Z","lastTransitionTime":"2025-10-08T07:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.957690 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:21 crc kubenswrapper[4693]: I1008 07:17:21.979155 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\
\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.002409 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:21Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.020645 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.044493 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.052800 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.052950 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.052975 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.052999 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.053016 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:22Z","lastTransitionTime":"2025-10-08T07:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.070167 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv
25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.101712 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"im
ageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cd5b6b75a586340ae47d2b2c9593698512bee2dd7381e83c45c7f06573f5955\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.120958 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.141247 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z"
Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.156884 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.157190 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.157343 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.157487 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.157617 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:22Z","lastTransitionTime":"2025-10-08T07:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.163215 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.178900 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.197382 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.216888 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.260941 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.261072 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.261092 4693 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.261122 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.261141 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:22Z","lastTransitionTime":"2025-10-08T07:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.364796 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.364882 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.364898 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.364961 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.364983 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:22Z","lastTransitionTime":"2025-10-08T07:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.468499 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.468582 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.468604 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.468634 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.468652 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:22Z","lastTransitionTime":"2025-10-08T07:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.572369 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.572443 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.572466 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.572495 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.572514 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:22Z","lastTransitionTime":"2025-10-08T07:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.621374 4693 generic.go:334] "Generic (PLEG): container finished" podID="0e017f3d-4e13-489e-9e9d-b5e6ec1d626a" containerID="9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1" exitCode=0 Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.621516 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" event={"ID":"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a","Type":"ContainerDied","Data":"9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1"} Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.621921 4693 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.622613 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.645380 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.662045 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.668954 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.676315 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.676370 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.676391 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.676416 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.676431 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:22Z","lastTransitionTime":"2025-10-08T07:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.690655 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.712149 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.731919 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.751250 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.767995 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.778977 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.779019 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.779031 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.779050 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.779063 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:22Z","lastTransitionTime":"2025-10-08T07:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.788509 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59
c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.811607 4693 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cd5b6b75a586340ae47d2b2c9593698512bee2dd7381e83c45c7f06573f5955\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\
\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.831584 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.847570 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.865117 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.882337 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.882370 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.882381 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.882398 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.882410 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:22Z","lastTransitionTime":"2025-10-08T07:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.883872 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.899160 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.905674 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:22 crc kubenswrapper[4693]: E1008 07:17:22.905963 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:17:30.905916237 +0000 UTC m=+36.276881212 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.918934 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.940474 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.969747 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cd5b6b75a586340ae47d2b2c9593698512bee2dd7381e83c45c7f06573f5955\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.984777 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.985217 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.985291 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.985309 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.985334 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:22 crc kubenswrapper[4693]: I1008 07:17:22.985353 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:22Z","lastTransitionTime":"2025-10-08T07:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.001053 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.006905 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" 
(UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.006968 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.007034 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.007082 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.007096 4693 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.007195 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:31.00716535 +0000 UTC m=+36.378130325 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.007246 4693 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.007321 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.007371 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.007370 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.007424 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.007444 4693 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.007395 4693 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.007332 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:31.007305193 +0000 UTC m=+36.378270158 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.007609 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:31.007575661 +0000 UTC m=+36.378540626 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.007634 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:31.007623432 +0000 UTC m=+36.378588397 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.024753 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.044099 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.057848 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.075316 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.089002 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.089061 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.089083 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.089110 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.089128 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:23Z","lastTransitionTime":"2025-10-08T07:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.097455 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.115516 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.135523 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.154393 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.171319 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.192958 4693 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.193034 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.193061 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.193093 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.193116 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:23Z","lastTransitionTime":"2025-10-08T07:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.296800 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.296906 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.296923 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.296955 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.296976 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:23Z","lastTransitionTime":"2025-10-08T07:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.362989 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.363062 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.363189 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.363220 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.363488 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:23 crc kubenswrapper[4693]: E1008 07:17:23.363526 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.399251 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.399318 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.399337 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.399365 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.399383 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:23Z","lastTransitionTime":"2025-10-08T07:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.502750 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.502846 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.502866 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.502893 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.502913 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:23Z","lastTransitionTime":"2025-10-08T07:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.606029 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.606090 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.606110 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.606168 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.606188 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:23Z","lastTransitionTime":"2025-10-08T07:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.635443 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" event={"ID":"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a","Type":"ContainerStarted","Data":"6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e"} Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.635560 4693 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.656569 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.677651 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.698735 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.709560 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.709632 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.709691 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.709719 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.709736 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:23Z","lastTransitionTime":"2025-10-08T07:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.716717 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.737802 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.758769 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.785328 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.805613 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.813131 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.813191 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.813206 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.813229 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.813246 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:23Z","lastTransitionTime":"2025-10-08T07:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.825915 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.845535 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.864580 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.884839 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.909730 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.915895 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.915946 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:23 crc 
kubenswrapper[4693]: I1008 07:17:23.915965 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.915992 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.916012 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:23Z","lastTransitionTime":"2025-10-08T07:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:23 crc kubenswrapper[4693]: I1008 07:17:23.941118 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cd5b6b75a586340ae47d2b2c9593698512bee2d
d7381e83c45c7f06573f5955\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:23Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.019301 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.019390 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.019416 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.019451 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.019477 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:24Z","lastTransitionTime":"2025-10-08T07:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.122650 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.122707 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.122719 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.122739 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.122753 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:24Z","lastTransitionTime":"2025-10-08T07:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.225529 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.225593 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.225612 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.225639 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.225657 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:24Z","lastTransitionTime":"2025-10-08T07:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.329171 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.329245 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.329269 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.329302 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.329326 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:24Z","lastTransitionTime":"2025-10-08T07:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.432613 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.432687 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.432710 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.432741 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.432763 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:24Z","lastTransitionTime":"2025-10-08T07:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.535390 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.535442 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.535454 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.535474 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.535489 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:24Z","lastTransitionTime":"2025-10-08T07:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.638955 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.639006 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.639024 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.639059 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.639078 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:24Z","lastTransitionTime":"2025-10-08T07:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.639743 4693 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.742522 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.742586 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.742604 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.742629 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.742647 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:24Z","lastTransitionTime":"2025-10-08T07:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.846228 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.846787 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.846805 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.846871 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.846891 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:24Z","lastTransitionTime":"2025-10-08T07:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.949458 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.949526 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.949544 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.949573 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:24 crc kubenswrapper[4693]: I1008 07:17:24.949592 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:24Z","lastTransitionTime":"2025-10-08T07:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.053211 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.053297 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.053322 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.053354 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.053376 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:25Z","lastTransitionTime":"2025-10-08T07:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.156628 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.156669 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.156682 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.156700 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.156712 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:25Z","lastTransitionTime":"2025-10-08T07:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.259596 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.259678 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.259700 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.259732 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.259754 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:25Z","lastTransitionTime":"2025-10-08T07:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.362182 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.362289 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:25 crc kubenswrapper[4693]: E1008 07:17:25.362364 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:25 crc kubenswrapper[4693]: E1008 07:17:25.362512 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.362766 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:25 crc kubenswrapper[4693]: E1008 07:17:25.362966 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.363199 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.363277 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.363295 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.363893 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.363959 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:25Z","lastTransitionTime":"2025-10-08T07:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.388475 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d347202
43b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.412803 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.433411 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.454015 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.467082 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.467151 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.467174 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.467208 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.467231 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:25Z","lastTransitionTime":"2025-10-08T07:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.472870 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.490994 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.510861 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.535744 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.569906 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.569943 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:25 crc 
kubenswrapper[4693]: I1008 07:17:25.569954 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.569972 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.569984 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:25Z","lastTransitionTime":"2025-10-08T07:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.571388 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cd5b6b75a586340ae47d2b2c9593698512bee2d
d7381e83c45c7f06573f5955\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.591209 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.609013 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.630183 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.646016 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/0.log" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.650795 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="2cd5b6b75a586340ae47d2b2c9593698512bee2dd7381e83c45c7f06573f5955" exitCode=1 Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.650864 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"2cd5b6b75a586340ae47d2b2c9593698512bee2dd7381e83c45c7f06573f5955"} Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.651146 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.652271 4693 scope.go:117] "RemoveContainer" containerID="2cd5b6b75a586340ae47d2b2c9593698512bee2dd7381e83c45c7f06573f5955" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.668216 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.672470 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.672772 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.673028 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.673302 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.673510 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:25Z","lastTransitionTime":"2025-10-08T07:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.683850 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.707013 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.733278 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cd5b6b75a586340ae47d2b2c9593698512bee2dd7381e83c45c7f06573f5955\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cd5b6b75a586340ae47d2b2c9593698512bee2dd7381e83c45c7f06573f5955\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:25Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.Pod event handler 3\\\\nI1008 07:17:25.119674 5926 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1008 07:17:25.119943 5926 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1008 07:17:25.119946 5926 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1008 07:17:25.119961 5926 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1008 07:17:25.119958 5926 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1008 07:17:25.120044 5926 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1008 07:17:25.120114 5926 factory.go:656] Stopping watch factory\\\\nI1008 07:17:25.120123 5926 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1008 07:17:25.120138 5926 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1008 07:17:25.120170 5926 handler.go:208] Removed *v1.Node event handler 7\\\\nI1008 07:17:25.120187 5926 handler.go:208] Removed *v1.Node event handler 2\\\\nI1008 07:17:25.120291 5926 reflector.go:311] Stopping reflector *v1.Pod (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.758071 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",
\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.778158 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.778264 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.778302 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.778357 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.778371 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:25Z","lastTransitionTime":"2025-10-08T07:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.780180 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.796651 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.811849 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.826273 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.843615 4693 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8
790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.862058 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.879883 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.881388 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.881437 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.881456 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.881483 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.881503 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:25Z","lastTransitionTime":"2025-10-08T07:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.900297 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.921753 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.961996 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.984965 4693 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.985035 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.985100 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.985130 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:25 crc kubenswrapper[4693]: I1008 07:17:25.985147 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:25Z","lastTransitionTime":"2025-10-08T07:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.088721 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.088767 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.088777 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.088794 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.088807 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:26Z","lastTransitionTime":"2025-10-08T07:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.191555 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.191641 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.191665 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.191701 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.191721 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:26Z","lastTransitionTime":"2025-10-08T07:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.295555 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.295639 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.295669 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.295701 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.295725 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:26Z","lastTransitionTime":"2025-10-08T07:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.397900 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.398255 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.398268 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.398284 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.398295 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:26Z","lastTransitionTime":"2025-10-08T07:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.500437 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.500475 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.500486 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.500501 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.500511 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:26Z","lastTransitionTime":"2025-10-08T07:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.603496 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.603539 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.603552 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.603573 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.603583 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:26Z","lastTransitionTime":"2025-10-08T07:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.656768 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/0.log" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.659804 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95"} Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.659971 4693 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.674182 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.693452 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.705910 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.705976 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.705997 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.706028 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.706044 4693 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:26Z","lastTransitionTime":"2025-10-08T07:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.711361 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.728484 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.741250 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.758591 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.772859 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.790192 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.809115 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.809180 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:26 crc 
kubenswrapper[4693]: I1008 07:17:26.809198 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.809230 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.809248 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:26Z","lastTransitionTime":"2025-10-08T07:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.817199 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a8
5bdeaa85f94acc20fb825d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cd5b6b75a586340ae47d2b2c9593698512bee2dd7381e83c45c7f06573f5955\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:25Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.Pod event handler 3\\\\nI1008 07:17:25.119674 5926 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1008 07:17:25.119943 5926 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1008 07:17:25.119946 5926 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1008 07:17:25.119961 5926 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1008 07:17:25.119958 5926 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1008 07:17:25.120044 5926 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1008 07:17:25.120114 5926 factory.go:656] Stopping watch factory\\\\nI1008 07:17:25.120123 5926 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1008 07:17:25.120138 5926 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1008 07:17:25.120170 5926 handler.go:208] Removed *v1.Node event handler 7\\\\nI1008 07:17:25.120187 5926 handler.go:208] Removed *v1.Node event handler 2\\\\nI1008 07:17:25.120291 5926 reflector.go:311] Stopping reflector *v1.Pod (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.844672 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.863700 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.882281 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.892984 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.907135 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.912799 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.912857 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.912870 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.912892 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:26 crc kubenswrapper[4693]: I1008 07:17:26.912905 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:26Z","lastTransitionTime":"2025-10-08T07:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.016105 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.016159 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.016170 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.016193 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.016206 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:27Z","lastTransitionTime":"2025-10-08T07:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.119176 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.119269 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.119287 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.119312 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.119328 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:27Z","lastTransitionTime":"2025-10-08T07:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.222024 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.222082 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.222096 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.222117 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.222134 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:27Z","lastTransitionTime":"2025-10-08T07:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.325350 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.325409 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.325417 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.325437 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.325451 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:27Z","lastTransitionTime":"2025-10-08T07:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.362454 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.362552 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:27 crc kubenswrapper[4693]: E1008 07:17:27.362666 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:27 crc kubenswrapper[4693]: E1008 07:17:27.362733 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.362781 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:27 crc kubenswrapper[4693]: E1008 07:17:27.362877 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.428988 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.429061 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.429073 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.429104 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.429118 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:27Z","lastTransitionTime":"2025-10-08T07:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.532388 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.532439 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.532451 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.532470 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.532482 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:27Z","lastTransitionTime":"2025-10-08T07:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.635611 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.635685 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.635708 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.635773 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.635800 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:27Z","lastTransitionTime":"2025-10-08T07:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.666194 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/1.log" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.666998 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/0.log" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.670880 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95" exitCode=1 Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.670944 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95"} Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.671009 4693 scope.go:117] "RemoveContainer" containerID="2cd5b6b75a586340ae47d2b2c9593698512bee2dd7381e83c45c7f06573f5955" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.672344 4693 scope.go:117] "RemoveContainer" containerID="aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95" Oct 08 07:17:27 crc kubenswrapper[4693]: E1008 07:17:27.672657 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\"" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.698326 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a8
5bdeaa85f94acc20fb825d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cd5b6b75a586340ae47d2b2c9593698512bee2dd7381e83c45c7f06573f5955\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:25Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.Pod event handler 3\\\\nI1008 07:17:25.119674 5926 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1008 07:17:25.119943 5926 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1008 07:17:25.119946 5926 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1008 07:17:25.119961 5926 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1008 07:17:25.119958 5926 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1008 07:17:25.120044 5926 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1008 07:17:25.120114 5926 factory.go:656] Stopping watch factory\\\\nI1008 07:17:25.120123 5926 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1008 07:17:25.120138 5926 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1008 07:17:25.120170 5926 handler.go:208] Removed *v1.Node event handler 7\\\\nI1008 07:17:25.120187 5926 handler.go:208] Removed *v1.Node event handler 2\\\\nI1008 07:17:25.120291 5926 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:26Z\\\",\\\"message\\\":\\\"de?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z]\\\\nI1008 07:17:26.857171 6088 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1008 07:17:26.857158 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:26.857183 6088 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 
failed attempt(s)\\\\nI1008 07:17:26.857140 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initCo
ntainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.715931 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" 
for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.734699 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"cont
ainerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.739367 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.739465 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.739494 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.739535 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.739562 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:27Z","lastTransitionTime":"2025-10-08T07:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.757435 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.769258 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.779639 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.792580 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.804116 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.816052 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.833644 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.842105 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.842177 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.842195 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.842221 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.842236 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:27Z","lastTransitionTime":"2025-10-08T07:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.852455 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.852734 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-ope
rator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.870784 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.884951 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.897088 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:27Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.944889 4693 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.944976 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.944996 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.945022 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:27 crc kubenswrapper[4693]: I1008 07:17:27.945040 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:27Z","lastTransitionTime":"2025-10-08T07:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.047264 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.047320 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.047330 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.047351 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.047362 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:28Z","lastTransitionTime":"2025-10-08T07:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.151034 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.151094 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.151107 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.151128 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.151142 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:28Z","lastTransitionTime":"2025-10-08T07:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.253831 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.253867 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.253877 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.253895 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.253924 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:28Z","lastTransitionTime":"2025-10-08T07:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.357236 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.357293 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.357310 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.357335 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.357353 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:28Z","lastTransitionTime":"2025-10-08T07:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.461530 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.461582 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.461592 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.461607 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.461619 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:28Z","lastTransitionTime":"2025-10-08T07:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.565003 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.565076 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.565094 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.565119 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.565136 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:28Z","lastTransitionTime":"2025-10-08T07:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.667360 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.667404 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.667414 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.667431 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.667443 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:28Z","lastTransitionTime":"2025-10-08T07:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.675609 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/1.log" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.679268 4693 scope.go:117] "RemoveContainer" containerID="aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95" Oct 08 07:17:28 crc kubenswrapper[4693]: E1008 07:17:28.679519 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\"" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.694299 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.708276 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.721590 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.736909 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.766221 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:26Z\\\",\\\"message\\\":\\\"de?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z]\\\\nI1008 07:17:26.857171 6088 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1008 07:17:26.857158 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:26.857183 6088 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1008 07:17:26.857140 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z"
Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.770631 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.770670 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.770680 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.770716 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.770728 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:28Z","lastTransitionTime":"2025-10-08T07:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.784214 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z"
Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.802121 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.815772 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.827618 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.841093 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.856241 4693 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8
790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.873661 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.873707 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.873722 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.873739 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.873749 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:28Z","lastTransitionTime":"2025-10-08T07:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.874762 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.888885 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.903445 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:28Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.975493 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.975534 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.975544 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.975562 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:28 crc kubenswrapper[4693]: I1008 07:17:28.975575 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:28Z","lastTransitionTime":"2025-10-08T07:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.078105 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.078169 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.078187 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.078211 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.078228 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:29Z","lastTransitionTime":"2025-10-08T07:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.180967 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.181041 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.181054 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.181072 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.181113 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:29Z","lastTransitionTime":"2025-10-08T07:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.287900 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.287977 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.288008 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.288034 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.288051 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:29Z","lastTransitionTime":"2025-10-08T07:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.349453 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn"] Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.349909 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.352544 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.352549 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.362149 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.362158 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:29 crc kubenswrapper[4693]: E1008 07:17:29.362386 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.362154 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:29 crc kubenswrapper[4693]: E1008 07:17:29.362952 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:29 crc kubenswrapper[4693]: E1008 07:17:29.363084 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.367853 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.389040 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:26Z\\\",\\\"message\\\":\\\"de?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z]\\\\nI1008 07:17:26.857171 6088 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1008 07:17:26.857158 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:26.857183 6088 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1008 07:17:26.857140 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.390736 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.390773 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.390782 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.390799 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.390829 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:29Z","lastTransitionTime":"2025-10-08T07:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.406425 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.421773 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.436371 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.448263 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.459309 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.471394 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.485083 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.487651 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/94f930b8-435a-415e-a2a8-cf8d2d04e134-env-overrides\") pod \"ovnkube-control-plane-749d76644c-t49dn\" (UID: \"94f930b8-435a-415e-a2a8-cf8d2d04e134\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.487755 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/94f930b8-435a-415e-a2a8-cf8d2d04e134-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-t49dn\" (UID: \"94f930b8-435a-415e-a2a8-cf8d2d04e134\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.487789 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjzvs\" (UniqueName: \"kubernetes.io/projected/94f930b8-435a-415e-a2a8-cf8d2d04e134-kube-api-access-mjzvs\") pod \"ovnkube-control-plane-749d76644c-t49dn\" (UID: \"94f930b8-435a-415e-a2a8-cf8d2d04e134\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc 
kubenswrapper[4693]: I1008 07:17:29.487919 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/94f930b8-435a-415e-a2a8-cf8d2d04e134-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-t49dn\" (UID: \"94f930b8-435a-415e-a2a8-cf8d2d04e134\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.492861 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.493054 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.493143 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.493239 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.493320 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:29Z","lastTransitionTime":"2025-10-08T07:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.500778 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.519977 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.541243 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 
tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.554889 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.570221 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.588873 4693 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjzvs\" (UniqueName: \"kubernetes.io/projected/94f930b8-435a-415e-a2a8-cf8d2d04e134-kube-api-access-mjzvs\") pod \"ovnkube-control-plane-749d76644c-t49dn\" (UID: \"94f930b8-435a-415e-a2a8-cf8d2d04e134\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.588946 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/94f930b8-435a-415e-a2a8-cf8d2d04e134-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-t49dn\" (UID: \"94f930b8-435a-415e-a2a8-cf8d2d04e134\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.588977 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/94f930b8-435a-415e-a2a8-cf8d2d04e134-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-t49dn\" (UID: \"94f930b8-435a-415e-a2a8-cf8d2d04e134\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.589013 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/94f930b8-435a-415e-a2a8-cf8d2d04e134-env-overrides\") pod \"ovnkube-control-plane-749d76644c-t49dn\" (UID: \"94f930b8-435a-415e-a2a8-cf8d2d04e134\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.589740 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/94f930b8-435a-415e-a2a8-cf8d2d04e134-env-overrides\") pod \"ovnkube-control-plane-749d76644c-t49dn\" (UID: \"94f930b8-435a-415e-a2a8-cf8d2d04e134\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.590016 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/94f930b8-435a-415e-a2a8-cf8d2d04e134-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-t49dn\" (UID: \"94f930b8-435a-415e-a2a8-cf8d2d04e134\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.591690 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:29Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.595622 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.595663 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.595676 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.595695 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.595711 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:29Z","lastTransitionTime":"2025-10-08T07:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.597173 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/94f930b8-435a-415e-a2a8-cf8d2d04e134-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-t49dn\" (UID: \"94f930b8-435a-415e-a2a8-cf8d2d04e134\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.612613 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjzvs\" (UniqueName: \"kubernetes.io/projected/94f930b8-435a-415e-a2a8-cf8d2d04e134-kube-api-access-mjzvs\") pod \"ovnkube-control-plane-749d76644c-t49dn\" (UID: \"94f930b8-435a-415e-a2a8-cf8d2d04e134\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.671191 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" Oct 08 07:17:29 crc kubenswrapper[4693]: W1008 07:17:29.684053 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94f930b8_435a_415e_a2a8_cf8d2d04e134.slice/crio-c66ff09ed5d5992f7b53858c7d0f18b767295e5023d8483ddb1cff60758f6788 WatchSource:0}: Error finding container c66ff09ed5d5992f7b53858c7d0f18b767295e5023d8483ddb1cff60758f6788: Status 404 returned error can't find the container with id c66ff09ed5d5992f7b53858c7d0f18b767295e5023d8483ddb1cff60758f6788 Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.698630 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.698685 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.698697 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.698719 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.698735 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:29Z","lastTransitionTime":"2025-10-08T07:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.801620 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.801686 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.801697 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.801720 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.801735 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:29Z","lastTransitionTime":"2025-10-08T07:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.904140 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.904173 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.904184 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.904200 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:29 crc kubenswrapper[4693]: I1008 07:17:29.904211 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:29Z","lastTransitionTime":"2025-10-08T07:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.017305 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.017361 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.017375 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.017399 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.017413 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:30Z","lastTransitionTime":"2025-10-08T07:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.094566 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.116881 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.120458 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.120514 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.120531 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.120558 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.120577 4693 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:30Z","lastTransitionTime":"2025-10-08T07:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.141528 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.162345 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\
\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.190080 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.203374 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.215191 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.223584 4693 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.223620 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.223632 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.223654 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.223667 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:30Z","lastTransitionTime":"2025-10-08T07:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.228477 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.244997 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.264798 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:26Z\\\",\\\"message\\\":\\\"de?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z]\\\\nI1008 07:17:26.857171 6088 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1008 07:17:26.857158 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:26.857183 6088 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1008 07:17:26.857140 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.280489 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.299102 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.316931 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.327029 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.327102 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.327117 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.327138 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.327153 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:30Z","lastTransitionTime":"2025-10-08T07:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.333261 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.346402 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.363065 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.430301 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.430365 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.430382 4693 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.430413 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.430437 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:30Z","lastTransitionTime":"2025-10-08T07:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.478282 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-b2lbv"] Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.478969 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:30 crc kubenswrapper[4693]: E1008 07:17:30.479061 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.497966 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.518976 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.534377 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.534431 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.534454 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.534479 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.534497 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:30Z","lastTransitionTime":"2025-10-08T07:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.536530 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.558507 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:26Z\\\",\\\"message\\\":\\\"de?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z]\\\\nI1008 07:17:26.857171 6088 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1008 07:17:26.857158 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:26.857183 6088 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1008 07:17:26.857140 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service 
k8s.ovn.org/owner:openshift-machine-api/contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs
6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.574483 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.593687 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.599662 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " 
pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.599736 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nhc5\" (UniqueName: \"kubernetes.io/projected/0f68f540-8d3f-4081-8c7e-cd5023991ada-kube-api-access-4nhc5\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.609363 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.619298 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.629432 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.637313 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.637385 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.637396 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.637412 4693 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.637424 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:30Z","lastTransitionTime":"2025-10-08T07:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.640325 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.652688 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 
2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.664273 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.682516 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.688407 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" event={"ID":"94f930b8-435a-415e-a2a8-cf8d2d04e134","Type":"ContainerStarted","Data":"9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.688508 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" event={"ID":"94f930b8-435a-415e-a2a8-cf8d2d04e134","Type":"ContainerStarted","Data":"9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.688531 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" event={"ID":"94f930b8-435a-415e-a2a8-cf8d2d04e134","Type":"ContainerStarted","Data":"c66ff09ed5d5992f7b53858c7d0f18b767295e5023d8483ddb1cff60758f6788"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.700288 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89
c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.700522 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.700606 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nhc5\" (UniqueName: \"kubernetes.io/projected/0f68f540-8d3f-4081-8c7e-cd5023991ada-kube-api-access-4nhc5\") pod 
\"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:30 crc kubenswrapper[4693]: E1008 07:17:30.700709 4693 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:17:30 crc kubenswrapper[4693]: E1008 07:17:30.700849 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs podName:0f68f540-8d3f-4081-8c7e-cd5023991ada nodeName:}" failed. No retries permitted until 2025-10-08 07:17:31.200790537 +0000 UTC m=+36.571755512 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs") pod "network-metrics-daemon-b2lbv" (UID: "0f68f540-8d3f-4081-8c7e-cd5023991ada") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.716190 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"s
tate\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] 
Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.728395 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nhc5\" (UniqueName: \"kubernetes.io/projected/0f68f540-8d3f-4081-8c7e-cd5023991ada-kube-api-access-4nhc5\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " 
pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.733858 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.739597 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.739662 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.739679 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.739706 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.739723 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:30Z","lastTransitionTime":"2025-10-08T07:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.748950 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.771480 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/
openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.786178 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.799248 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.811036 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 
07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.824107 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.835588 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.841994 4693 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.842058 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.842083 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.842117 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.842139 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:30Z","lastTransitionTime":"2025-10-08T07:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.860366 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a8
5bdeaa85f94acc20fb825d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:26Z\\\",\\\"message\\\":\\\"de?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z]\\\\nI1008 07:17:26.857171 6088 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1008 07:17:26.857158 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:26.857183 6088 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1008 07:17:26.857140 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.877597 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.895072 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acc
ess-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"re
ason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.909475 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.924366 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.936693 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.945227 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.945264 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.945274 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.945291 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.945301 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:30Z","lastTransitionTime":"2025-10-08T07:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.951446 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.970236 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:30 crc kubenswrapper[4693]: I1008 07:17:30.992212 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:30Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.003489 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.003726 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:17:47.003686723 +0000 UTC m=+52.374651658 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.047924 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.047973 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.047986 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.048005 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.048040 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.105486 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.105553 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.105582 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.105630 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.105788 4693 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.105864 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.105889 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.105902 4693 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.105922 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:47.105893581 +0000 UTC m=+52.476858706 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.105927 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.105963 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:47.105945953 +0000 UTC m=+52.476910888 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.105972 4693 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.106021 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:47.106008855 +0000 UTC m=+52.476973790 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.105966 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.106048 4693 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.106076 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-08 07:17:47.106069706 +0000 UTC m=+52.477034641 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.151029 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.151095 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.151116 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.151146 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.151170 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.207197 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.207462 4693 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.207602 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs podName:0f68f540-8d3f-4081-8c7e-cd5023991ada nodeName:}" failed. No retries permitted until 2025-10-08 07:17:32.207576946 +0000 UTC m=+37.578541871 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs") pod "network-metrics-daemon-b2lbv" (UID: "0f68f540-8d3f-4081-8c7e-cd5023991ada") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.253946 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.254001 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.254012 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.254036 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.254049 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.269259 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.269316 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.269334 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.269357 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.269373 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.293187 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:31Z is after 
2025-08-24T17:21:41Z" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.298848 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.298913 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.298931 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.298961 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.298990 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.315998 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:31Z is after 
2025-08-24T17:21:41Z" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.321241 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.321305 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.321324 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.321353 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.321376 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.340720 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:31Z is after 
2025-08-24T17:21:41Z" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.345291 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.345349 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.345366 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.345392 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.345412 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.362884 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.362979 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.363025 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.362953 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network 
plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"name
s\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-08T07:17:31Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.363145 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.363388 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.363573 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.369061 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.369127 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.369143 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.369167 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.369182 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.388842 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:31Z is after 
2025-08-24T17:21:41Z" Oct 08 07:17:31 crc kubenswrapper[4693]: E1008 07:17:31.388973 4693 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.391652 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.391718 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.391735 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.391771 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.391792 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.495165 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.495525 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.495536 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.495556 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.495568 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.599112 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.599186 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.599204 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.599230 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.599252 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.702407 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.702458 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.702476 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.702500 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.702519 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.805938 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.806023 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.806043 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.806069 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.806088 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.909036 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.909107 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.909125 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.909149 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:31 crc kubenswrapper[4693]: I1008 07:17:31.909166 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:31Z","lastTransitionTime":"2025-10-08T07:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.012344 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.012416 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.012434 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.012460 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.012481 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:32Z","lastTransitionTime":"2025-10-08T07:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.115683 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.115738 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.115753 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.115772 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.115784 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:32Z","lastTransitionTime":"2025-10-08T07:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.216919 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:32 crc kubenswrapper[4693]: E1008 07:17:32.217153 4693 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:17:32 crc kubenswrapper[4693]: E1008 07:17:32.217244 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs podName:0f68f540-8d3f-4081-8c7e-cd5023991ada nodeName:}" failed. No retries permitted until 2025-10-08 07:17:34.217216267 +0000 UTC m=+39.588181242 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs") pod "network-metrics-daemon-b2lbv" (UID: "0f68f540-8d3f-4081-8c7e-cd5023991ada") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.218331 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.218388 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.218399 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.218418 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.218432 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:32Z","lastTransitionTime":"2025-10-08T07:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.320747 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.320809 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.320862 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.320888 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.320907 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:32Z","lastTransitionTime":"2025-10-08T07:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.362502 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:32 crc kubenswrapper[4693]: E1008 07:17:32.362717 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.423742 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.423800 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.423848 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.423879 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.423905 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:32Z","lastTransitionTime":"2025-10-08T07:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.526492 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.526545 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.526556 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.526576 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.526592 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:32Z","lastTransitionTime":"2025-10-08T07:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.629540 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.629591 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.629619 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.629635 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.629646 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:32Z","lastTransitionTime":"2025-10-08T07:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.732265 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.732344 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.732369 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.732400 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.732450 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:32Z","lastTransitionTime":"2025-10-08T07:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.835078 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.835128 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.835144 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.835162 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.835172 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:32Z","lastTransitionTime":"2025-10-08T07:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.938230 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.938290 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.938308 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.938331 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:32 crc kubenswrapper[4693]: I1008 07:17:32.938348 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:32Z","lastTransitionTime":"2025-10-08T07:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.041161 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.041222 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.041243 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.041268 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.041286 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:33Z","lastTransitionTime":"2025-10-08T07:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.143773 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.143891 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.143910 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.143938 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.143957 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:33Z","lastTransitionTime":"2025-10-08T07:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.247154 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.247204 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.247221 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.247243 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.247267 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:33Z","lastTransitionTime":"2025-10-08T07:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.350990 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.351058 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.351090 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.351116 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.351129 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:33Z","lastTransitionTime":"2025-10-08T07:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.362904 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.362976 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.362901 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 08 07:17:33 crc kubenswrapper[4693]: E1008 07:17:33.363066 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 08 07:17:33 crc kubenswrapper[4693]: E1008 07:17:33.363333 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 08 07:17:33 crc kubenswrapper[4693]: E1008 07:17:33.363481 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.454200 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.454260 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.454278 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.454304 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.454321 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:33Z","lastTransitionTime":"2025-10-08T07:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.557291 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.557357 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.557379 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.557410 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.557433 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:33Z","lastTransitionTime":"2025-10-08T07:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.659559 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.659617 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.659638 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.659662 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.659677 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:33Z","lastTransitionTime":"2025-10-08T07:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.762109 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.762193 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.762214 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.762244 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.762268 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:33Z","lastTransitionTime":"2025-10-08T07:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.865415 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.865461 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.865469 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.865486 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.865496 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:33Z","lastTransitionTime":"2025-10-08T07:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.968641 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.968678 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.968686 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.968702 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:33 crc kubenswrapper[4693]: I1008 07:17:33.968711 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:33Z","lastTransitionTime":"2025-10-08T07:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.071067 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.071095 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.071103 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.071117 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.071128 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:34Z","lastTransitionTime":"2025-10-08T07:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.174003 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.174062 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.174079 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.174103 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.174121 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:34Z","lastTransitionTime":"2025-10-08T07:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.244593 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " pod="openshift-multus/network-metrics-daemon-b2lbv"
Oct 08 07:17:34 crc kubenswrapper[4693]: E1008 07:17:34.244748 4693 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 08 07:17:34 crc kubenswrapper[4693]: E1008 07:17:34.244826 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs podName:0f68f540-8d3f-4081-8c7e-cd5023991ada nodeName:}" failed. No retries permitted until 2025-10-08 07:17:38.244801802 +0000 UTC m=+43.615766737 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs") pod "network-metrics-daemon-b2lbv" (UID: "0f68f540-8d3f-4081-8c7e-cd5023991ada") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.277223 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.277267 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.277277 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.277295 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.277305 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:34Z","lastTransitionTime":"2025-10-08T07:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.362977 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv"
Oct 08 07:17:34 crc kubenswrapper[4693]: E1008 07:17:34.363181 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.379901 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.379989 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.380017 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.380126 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.380154 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:34Z","lastTransitionTime":"2025-10-08T07:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.482901 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.482937 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.482945 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.482961 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.482971 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:34Z","lastTransitionTime":"2025-10-08T07:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.585450 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.585486 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.585495 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.585509 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.585518 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:34Z","lastTransitionTime":"2025-10-08T07:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.688200 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.688265 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.688282 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.688308 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.688325 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:34Z","lastTransitionTime":"2025-10-08T07:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.790558 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.790605 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.790615 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.790632 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.790646 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:34Z","lastTransitionTime":"2025-10-08T07:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.893124 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.893190 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.893207 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.893233 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.893252 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:34Z","lastTransitionTime":"2025-10-08T07:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.996489 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.996571 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.996597 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.996637 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:34 crc kubenswrapper[4693]: I1008 07:17:34.996661 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:34Z","lastTransitionTime":"2025-10-08T07:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.099495 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.099594 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.099612 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.099637 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.099655 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:35Z","lastTransitionTime":"2025-10-08T07:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.202261 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.202337 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.202364 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.202392 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.202409 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:35Z","lastTransitionTime":"2025-10-08T07:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.305043 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.305097 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.305123 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.305141 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.305152 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:35Z","lastTransitionTime":"2025-10-08T07:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.362678 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.362750 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 08 07:17:35 crc kubenswrapper[4693]: E1008 07:17:35.362867 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.362914 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 08 07:17:35 crc kubenswrapper[4693]: E1008 07:17:35.363042 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 08 07:17:35 crc kubenswrapper[4693]: E1008 07:17:35.363236 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.376517 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.391694 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.408567 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.408639 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:35 crc 
kubenswrapper[4693]: I1008 07:17:35.408651 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.408669 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.408680 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:35Z","lastTransitionTime":"2025-10-08T07:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.413559 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a8
5bdeaa85f94acc20fb825d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:26Z\\\",\\\"message\\\":\\\"de?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z]\\\\nI1008 07:17:26.857171 6088 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1008 07:17:26.857158 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:26.857183 6088 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1008 07:17:26.857140 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.433925 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.454968 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.474783 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.489195 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.503957 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.511548 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.511618 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.511638 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.511665 4693 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.511683 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:35Z","lastTransitionTime":"2025-10-08T07:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.518729 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.532985 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.552838 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.567292 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.580897 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.595185 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.608062 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.614375 4693 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.614424 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.614438 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.614460 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.614473 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:35Z","lastTransitionTime":"2025-10-08T07:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.620538 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:35Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.717381 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.717670 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.717753 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.717901 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.717978 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:35Z","lastTransitionTime":"2025-10-08T07:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.820336 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.820413 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.820427 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.820448 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.820460 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:35Z","lastTransitionTime":"2025-10-08T07:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.923021 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.923078 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.923089 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.923109 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:35 crc kubenswrapper[4693]: I1008 07:17:35.923123 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:35Z","lastTransitionTime":"2025-10-08T07:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.026053 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.026111 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.026128 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.026150 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.026167 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:36Z","lastTransitionTime":"2025-10-08T07:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.129316 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.129367 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.129377 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.129397 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.129409 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:36Z","lastTransitionTime":"2025-10-08T07:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.231842 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.231884 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.231894 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.231911 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.231923 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:36Z","lastTransitionTime":"2025-10-08T07:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.334995 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.335077 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.335095 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.335129 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.335179 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:36Z","lastTransitionTime":"2025-10-08T07:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.362147 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:36 crc kubenswrapper[4693]: E1008 07:17:36.362311 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.437534 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.437591 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.437604 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.437622 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.437634 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:36Z","lastTransitionTime":"2025-10-08T07:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.540100 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.540153 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.540167 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.540189 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.540203 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:36Z","lastTransitionTime":"2025-10-08T07:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.643492 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.643542 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.643553 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.643573 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.643585 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:36Z","lastTransitionTime":"2025-10-08T07:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.745896 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.745945 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.745957 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.745973 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.745987 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:36Z","lastTransitionTime":"2025-10-08T07:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.849451 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.849518 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.849540 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.849569 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.849593 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:36Z","lastTransitionTime":"2025-10-08T07:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.952711 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.952750 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.952759 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.952828 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:36 crc kubenswrapper[4693]: I1008 07:17:36.952840 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:36Z","lastTransitionTime":"2025-10-08T07:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.056088 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.056136 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.056152 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.056178 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.056195 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:37Z","lastTransitionTime":"2025-10-08T07:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.159223 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.159263 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.159276 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.159294 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.159306 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:37Z","lastTransitionTime":"2025-10-08T07:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.261327 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.261424 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.261448 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.261478 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.261499 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:37Z","lastTransitionTime":"2025-10-08T07:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.362736 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.362797 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:37 crc kubenswrapper[4693]: E1008 07:17:37.362893 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.362939 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:37 crc kubenswrapper[4693]: E1008 07:17:37.363053 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:37 crc kubenswrapper[4693]: E1008 07:17:37.363174 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.364489 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.364576 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.364593 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.364625 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.364658 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:37Z","lastTransitionTime":"2025-10-08T07:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.467718 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.467776 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.467793 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.467852 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.467871 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:37Z","lastTransitionTime":"2025-10-08T07:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.570359 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.570420 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.570438 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.570464 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.570482 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:37Z","lastTransitionTime":"2025-10-08T07:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.673056 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.673379 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.673554 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.673705 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.673838 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:37Z","lastTransitionTime":"2025-10-08T07:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.776989 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.777040 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.777059 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.777083 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.777102 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:37Z","lastTransitionTime":"2025-10-08T07:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.879498 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.879557 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.879574 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.879600 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.879625 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:37Z","lastTransitionTime":"2025-10-08T07:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.981646 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.981697 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.981707 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.981727 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:37 crc kubenswrapper[4693]: I1008 07:17:37.981736 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:37Z","lastTransitionTime":"2025-10-08T07:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.084395 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.084464 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.084486 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.084518 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.084541 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:38Z","lastTransitionTime":"2025-10-08T07:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.187802 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.187874 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.187883 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.187904 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.187916 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:38Z","lastTransitionTime":"2025-10-08T07:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.290189 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:38 crc kubenswrapper[4693]: E1008 07:17:38.290340 4693 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:17:38 crc kubenswrapper[4693]: E1008 07:17:38.290400 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs podName:0f68f540-8d3f-4081-8c7e-cd5023991ada nodeName:}" failed. No retries permitted until 2025-10-08 07:17:46.290380927 +0000 UTC m=+51.661345862 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs") pod "network-metrics-daemon-b2lbv" (UID: "0f68f540-8d3f-4081-8c7e-cd5023991ada") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.290870 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.291008 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.291124 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.291234 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.291321 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:38Z","lastTransitionTime":"2025-10-08T07:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.362626 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:38 crc kubenswrapper[4693]: E1008 07:17:38.362855 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.393964 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.394020 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.394038 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.394067 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.394085 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:38Z","lastTransitionTime":"2025-10-08T07:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.496696 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.496736 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.496745 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.496763 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.496776 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:38Z","lastTransitionTime":"2025-10-08T07:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.599951 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.600001 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.600012 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.600034 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.600046 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:38Z","lastTransitionTime":"2025-10-08T07:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.702632 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.702684 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.702695 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.702716 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.702728 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:38Z","lastTransitionTime":"2025-10-08T07:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.805160 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.805227 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.805241 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.805265 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.805280 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:38Z","lastTransitionTime":"2025-10-08T07:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.908419 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.908466 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.908484 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.908511 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:38 crc kubenswrapper[4693]: I1008 07:17:38.908529 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:38Z","lastTransitionTime":"2025-10-08T07:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.010893 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.010993 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.011011 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.011039 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.011056 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:39Z","lastTransitionTime":"2025-10-08T07:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.113468 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.113555 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.113576 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.113604 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.113623 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:39Z","lastTransitionTime":"2025-10-08T07:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.216780 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.216877 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.216893 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.216917 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.216938 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:39Z","lastTransitionTime":"2025-10-08T07:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.320159 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.320229 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.320251 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.320285 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.320306 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:39Z","lastTransitionTime":"2025-10-08T07:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.362074 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.362181 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:39 crc kubenswrapper[4693]: E1008 07:17:39.362288 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:39 crc kubenswrapper[4693]: E1008 07:17:39.362547 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.362418 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:39 crc kubenswrapper[4693]: E1008 07:17:39.362699 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.422890 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.422955 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.422973 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.423021 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.423038 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:39Z","lastTransitionTime":"2025-10-08T07:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.525648 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.525711 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.525733 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.525761 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.525779 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:39Z","lastTransitionTime":"2025-10-08T07:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.628473 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.628516 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.628527 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.628546 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.628557 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:39Z","lastTransitionTime":"2025-10-08T07:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.731092 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.731138 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.731148 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.731166 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.731178 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:39Z","lastTransitionTime":"2025-10-08T07:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.834200 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.834236 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.834246 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.834263 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.834273 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:39Z","lastTransitionTime":"2025-10-08T07:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.937924 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.937983 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.938001 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.938028 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:39 crc kubenswrapper[4693]: I1008 07:17:39.938046 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:39Z","lastTransitionTime":"2025-10-08T07:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.041026 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.041088 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.041106 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.041132 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.041149 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:40Z","lastTransitionTime":"2025-10-08T07:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.144888 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.144982 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.145001 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.145027 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.145044 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:40Z","lastTransitionTime":"2025-10-08T07:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.248428 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.248473 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.248485 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.248503 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.248515 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:40Z","lastTransitionTime":"2025-10-08T07:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.351450 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.351502 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.351524 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.351555 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.351576 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:40Z","lastTransitionTime":"2025-10-08T07:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.361915 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:40 crc kubenswrapper[4693]: E1008 07:17:40.362104 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.454287 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.454340 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.454356 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.454382 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.454400 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:40Z","lastTransitionTime":"2025-10-08T07:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.557168 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.557230 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.557253 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.557284 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.557306 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:40Z","lastTransitionTime":"2025-10-08T07:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.659907 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.659952 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.659969 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.659994 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.660011 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:40Z","lastTransitionTime":"2025-10-08T07:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.762546 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.762606 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.762624 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.762651 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.762669 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:40Z","lastTransitionTime":"2025-10-08T07:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.865937 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.865983 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.865998 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.866021 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.866037 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:40Z","lastTransitionTime":"2025-10-08T07:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.968621 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.968725 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.968742 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.968767 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:40 crc kubenswrapper[4693]: I1008 07:17:40.968786 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:40Z","lastTransitionTime":"2025-10-08T07:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.071660 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.071705 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.071716 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.071733 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.071744 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.174457 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.174505 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.174516 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.174535 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.174545 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.277380 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.277443 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.277460 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.277486 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.277507 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.362425 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:41 crc kubenswrapper[4693]: E1008 07:17:41.362568 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.362426 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.362443 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:41 crc kubenswrapper[4693]: E1008 07:17:41.362737 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:41 crc kubenswrapper[4693]: E1008 07:17:41.362783 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.379718 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.379813 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.379839 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.379858 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.379870 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.404519 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.404584 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.404602 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.404628 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.404646 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: E1008 07:17:41.423852 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:41Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.428313 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.428439 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.428465 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.428499 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.428521 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: E1008 07:17:41.447178 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:41Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.452061 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.452132 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.452151 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.452179 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.452197 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: E1008 07:17:41.470759 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:41Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.475668 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.475729 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.475746 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.475770 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.475788 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: E1008 07:17:41.494181 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:41Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.499054 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.499108 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.499125 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.499152 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.499168 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: E1008 07:17:41.517574 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:41Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:41 crc kubenswrapper[4693]: E1008 07:17:41.517805 4693 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.519905 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.519955 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.519973 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.520000 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.520017 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.629199 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.629278 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.629303 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.629331 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.629354 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.731506 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.731570 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.731591 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.731619 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.731642 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.834475 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.834542 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.834563 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.834589 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.834607 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.938273 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.938321 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.938330 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.938347 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:41 crc kubenswrapper[4693]: I1008 07:17:41.938358 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:41Z","lastTransitionTime":"2025-10-08T07:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.040391 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.040452 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.040474 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.040503 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.040525 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:42Z","lastTransitionTime":"2025-10-08T07:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.158516 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.158551 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.158561 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.158575 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.158585 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:42Z","lastTransitionTime":"2025-10-08T07:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.260918 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.260960 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.260970 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.260990 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.261000 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:42Z","lastTransitionTime":"2025-10-08T07:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.361916 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:42 crc kubenswrapper[4693]: E1008 07:17:42.362069 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.363176 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.363223 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.363238 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.363260 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.363273 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:42Z","lastTransitionTime":"2025-10-08T07:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.465354 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.465434 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.465459 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.465491 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.465516 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:42Z","lastTransitionTime":"2025-10-08T07:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.567770 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.567840 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.567851 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.567872 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.567882 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:42Z","lastTransitionTime":"2025-10-08T07:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.670721 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.670780 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.670794 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.670856 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.670878 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:42Z","lastTransitionTime":"2025-10-08T07:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.773958 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.774000 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.774012 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.774033 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.774048 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:42Z","lastTransitionTime":"2025-10-08T07:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.876499 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.876570 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.876589 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.876617 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.876634 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:42Z","lastTransitionTime":"2025-10-08T07:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.980620 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.980742 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.980810 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.980880 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:42 crc kubenswrapper[4693]: I1008 07:17:42.980904 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:42Z","lastTransitionTime":"2025-10-08T07:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.083687 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.083734 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.083748 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.083764 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.083773 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:43Z","lastTransitionTime":"2025-10-08T07:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.187634 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.187689 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.187709 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.187731 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.187744 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:43Z","lastTransitionTime":"2025-10-08T07:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.291155 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.291237 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.291265 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.291298 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.291321 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:43Z","lastTransitionTime":"2025-10-08T07:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.362025 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.362200 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:43 crc kubenswrapper[4693]: E1008 07:17:43.362359 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.362400 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:43 crc kubenswrapper[4693]: E1008 07:17:43.362588 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:43 crc kubenswrapper[4693]: E1008 07:17:43.363175 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.363729 4693 scope.go:117] "RemoveContainer" containerID="aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.394926 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.395045 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.395106 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.395140 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.395162 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:43Z","lastTransitionTime":"2025-10-08T07:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.499432 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.499554 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.499581 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.499616 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.499642 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:43Z","lastTransitionTime":"2025-10-08T07:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.602687 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.603274 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.603292 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.603322 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.603340 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:43Z","lastTransitionTime":"2025-10-08T07:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.671251 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.687204 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.687309 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.703856 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.708545 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.708600 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.708616 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.708641 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.708659 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:43Z","lastTransitionTime":"2025-10-08T07:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.723398 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.741966 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.743050 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/1.log" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.747552 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107"} Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.748395 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.760731 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.773800 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.790737 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.808470 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.812153 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.812211 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.812229 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.812257 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.812275 4693 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:43Z","lastTransitionTime":"2025-10-08T07:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.823743 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.838322 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.860628 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.874468 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.891113 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podI
P\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.908564 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.914593 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.914636 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.914648 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.914665 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 
07:17:43.914677 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:43Z","lastTransitionTime":"2025-10-08T07:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.926364 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.953736 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:26Z\\\",\\\"message\\\":\\\"de?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z]\\\\nI1008 07:17:26.857171 6088 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1008 07:17:26.857158 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:26.857183 6088 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1008 07:17:26.857140 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service 
k8s.ovn.org/owner:openshift-machine-api/contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs
6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.968976 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:43 crc kubenswrapper[4693]: I1008 07:17:43.984312 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.001333 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:26Z\\\",\\\"message\\\":\\\"de?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z]\\\\nI1008 07:17:26.857171 6088 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1008 07:17:26.857158 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:26.857183 6088 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1008 07:17:26.857140 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service 
k8s.ovn.org/owner:openshift-machine-api/contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:43Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.013042 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.016745 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.016783 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.016791 4693 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.016807 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.016835 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:44Z","lastTransitionTime":"2025-10-08T07:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.026777 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.041876 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.052067 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.061593 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.072748 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.091357 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.117215 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 
07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 
07:17:44.119363 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.119415 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.119429 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.119455 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.119471 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:44Z","lastTransitionTime":"2025-10-08T07:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.134063 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.150098 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.164583 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/opensh
ift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.178506 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.194296 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.208569 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.222894 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.222956 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.222967 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.222986 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.223001 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:44Z","lastTransitionTime":"2025-10-08T07:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.325500 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.325545 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.325574 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.325593 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.325604 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:44Z","lastTransitionTime":"2025-10-08T07:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.362471 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:44 crc kubenswrapper[4693]: E1008 07:17:44.362662 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.428918 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.428972 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.428982 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.429003 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.429014 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:44Z","lastTransitionTime":"2025-10-08T07:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.532900 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.533068 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.533093 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.533129 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.533152 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:44Z","lastTransitionTime":"2025-10-08T07:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.637446 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.637538 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.637563 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.637604 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.637624 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:44Z","lastTransitionTime":"2025-10-08T07:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.741071 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.741182 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.741203 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.741227 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.741267 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:44Z","lastTransitionTime":"2025-10-08T07:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.753548 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/2.log" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.754237 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/1.log" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.757950 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107" exitCode=1 Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.758037 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107"} Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.758195 4693 scope.go:117] "RemoveContainer" containerID="aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.759143 4693 scope.go:117] "RemoveContainer" containerID="884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107" Oct 08 07:17:44 crc kubenswrapper[4693]: E1008 07:17:44.759380 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\"" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.778228 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.795019 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.809388 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.830949 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.844457 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.844551 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.844591 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.844638 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.844664 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:44Z","lastTransitionTime":"2025-10-08T07:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.856566 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.879873 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:26Z\\\",\\\"message\\\":\\\"de?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z]\\\\nI1008 07:17:26.857171 6088 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1008 07:17:26.857158 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:26.857183 6088 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1008 07:17:26.857140 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:44Z\\\",\\\"message\\\":\\\"V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, 
internalTrafficLocal:false, hasNodePort:false}}\\\\nI1008 07:17:44.391870 6302 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1008 07:17:44.391862 6302 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/community-operators]} name:Service_openshift-marketplace/community-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.189:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d389393c-7ba9-422c-b3f5-06e391d537d2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:44.391894 6302 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nF1008 07:17:44.391901 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.900233 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.917583 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.935672 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.947132 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.947201 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.947216 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.947239 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.947252 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:44Z","lastTransitionTime":"2025-10-08T07:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.951923 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.966558 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:44 crc kubenswrapper[4693]: I1008 07:17:44.982733 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.001542 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:44Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.020886 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.042185 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.049355 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.049429 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.049454 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.049483 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.049500 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:45Z","lastTransitionTime":"2025-10-08T07:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.062499 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.085380 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.152190 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.152232 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.152243 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.152265 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.152277 4693 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:45Z","lastTransitionTime":"2025-10-08T07:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.255411 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.255466 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.255477 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.255499 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.255512 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:45Z","lastTransitionTime":"2025-10-08T07:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.358116 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.358178 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.358197 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.358226 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.358244 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:45Z","lastTransitionTime":"2025-10-08T07:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.362527 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.362569 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:45 crc kubenswrapper[4693]: E1008 07:17:45.362692 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.362761 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:45 crc kubenswrapper[4693]: E1008 07:17:45.363160 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:45 crc kubenswrapper[4693]: E1008 07:17:45.363258 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.389966 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp
-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"nam
e\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb2
6dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.419244 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622
cf6fc4be1d763957d1060107\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa6dfa89c0cadba9b079edef5fc3e802621d10a85bdeaa85f94acc20fb825d95\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:26Z\\\",\\\"message\\\":\\\"de?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:26Z is after 2025-08-24T17:21:41Z]\\\\nI1008 07:17:26.857171 6088 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1008 07:17:26.857158 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:26.857183 6088 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1008 07:17:26.857140 6088 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:44Z\\\",\\\"message\\\":\\\"V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1008 07:17:44.391870 6302 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1008 07:17:44.391862 6302 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/community-operators]} name:Service_openshift-marketplace/community-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.189:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d389393c-7ba9-422c-b3f5-06e391d537d2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:44.391894 6302 services_controller.go:445] Built service 
openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nF1008 07:17:44.391901 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11
\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.439237 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.458353 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.461897 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.461932 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.461942 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.461992 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.462015 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:45Z","lastTransitionTime":"2025-10-08T07:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.477651 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.491861 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.503585 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.516413 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.534249 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.548886 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.564020 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.565757 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.565858 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.565874 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.565941 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.565957 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:45Z","lastTransitionTime":"2025-10-08T07:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.581345 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.602973 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.618946 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.634408 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.651685 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.668170 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.669760 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.669800 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.669832 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.669855 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.669870 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:45Z","lastTransitionTime":"2025-10-08T07:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.766020 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/2.log" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.772088 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.772143 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.772161 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.772100 4693 scope.go:117] "RemoveContainer" containerID="884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.772185 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.772365 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:45Z","lastTransitionTime":"2025-10-08T07:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:45 crc kubenswrapper[4693]: E1008 07:17:45.772473 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\"" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.794737 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.813708 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.831722 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.846867 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.875378 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:44Z\\\",\\\"message\\\":\\\"V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1008 07:17:44.391870 6302 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1008 07:17:44.391862 6302 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/community-operators]} name:Service_openshift-marketplace/community-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.189:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d389393c-7ba9-422c-b3f5-06e391d537d2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:44.391894 6302 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nF1008 07:17:44.391901 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.875747 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.875787 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.875797 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.875832 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.875843 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:45Z","lastTransitionTime":"2025-10-08T07:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.898750 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.924121 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.945194 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.962419 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.979104 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.979167 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.979188 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.979220 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.979239 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:45Z","lastTransitionTime":"2025-10-08T07:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.981142 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:45 crc kubenswrapper[4693]: I1008 07:17:45.997139 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:45Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.018128 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:46Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.045982 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:46Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.067162 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:46Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.083337 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.083381 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.083397 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.083422 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.083439 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:46Z","lastTransitionTime":"2025-10-08T07:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.095179 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:46Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.113217 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:46Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.130426 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:46Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.187058 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.187119 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.187138 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.187165 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.187183 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:46Z","lastTransitionTime":"2025-10-08T07:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.291102 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.291153 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.291168 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.291186 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.291199 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:46Z","lastTransitionTime":"2025-10-08T07:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.297689 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:46 crc kubenswrapper[4693]: E1008 07:17:46.297922 4693 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:17:46 crc kubenswrapper[4693]: E1008 07:17:46.298012 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs podName:0f68f540-8d3f-4081-8c7e-cd5023991ada nodeName:}" failed. No retries permitted until 2025-10-08 07:18:02.297987495 +0000 UTC m=+67.668952440 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs") pod "network-metrics-daemon-b2lbv" (UID: "0f68f540-8d3f-4081-8c7e-cd5023991ada") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.362123 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:46 crc kubenswrapper[4693]: E1008 07:17:46.362346 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.395149 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.395202 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.395212 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.395231 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.395243 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:46Z","lastTransitionTime":"2025-10-08T07:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.499225 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.499909 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.499951 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.499996 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.500014 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:46Z","lastTransitionTime":"2025-10-08T07:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.603726 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.603847 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.603897 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.604022 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.604047 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:46Z","lastTransitionTime":"2025-10-08T07:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.707616 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.707700 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.707719 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.707752 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.707776 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:46Z","lastTransitionTime":"2025-10-08T07:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.811210 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.811290 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.811308 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.811339 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.811359 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:46Z","lastTransitionTime":"2025-10-08T07:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.914638 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.914701 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.914719 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.914743 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:46 crc kubenswrapper[4693]: I1008 07:17:46.914761 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:46Z","lastTransitionTime":"2025-10-08T07:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.007325 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.007483 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:18:19.007448314 +0000 UTC m=+84.378413279 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.018223 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.018279 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.018299 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.018325 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.018343 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:47Z","lastTransitionTime":"2025-10-08T07:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.109077 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.109152 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.109200 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.109232 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.109441 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.109450 4693 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.109518 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.109537 4693 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.109561 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:18:19.109537599 +0000 UTC m=+84.480502614 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.109564 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.109727 4693 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.109730 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:18:19.109696964 +0000 UTC m=+84.480661929 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.109470 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.109878 4693 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.109809 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-08 07:18:19.109783266 +0000 UTC m=+84.480748241 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.109963 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-08 07:18:19.10994337 +0000 UTC m=+84.480908345 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.120993 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.121036 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.121045 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.121061 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.121073 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:47Z","lastTransitionTime":"2025-10-08T07:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.223608 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.223673 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.223692 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.223722 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.223742 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:47Z","lastTransitionTime":"2025-10-08T07:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.326468 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.326550 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.326569 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.326603 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.326624 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:47Z","lastTransitionTime":"2025-10-08T07:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.361989 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.362031 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.362160 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.362372 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.362507 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:47 crc kubenswrapper[4693]: E1008 07:17:47.362708 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.430310 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.430371 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.430391 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.430419 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.430437 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:47Z","lastTransitionTime":"2025-10-08T07:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.534496 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.534574 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.534595 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.534624 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.534645 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:47Z","lastTransitionTime":"2025-10-08T07:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.637100 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.637163 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.637182 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.637211 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.637228 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:47Z","lastTransitionTime":"2025-10-08T07:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.740321 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.740383 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.740403 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.740435 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.740458 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:47Z","lastTransitionTime":"2025-10-08T07:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.843716 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.843758 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.843788 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.843807 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.843832 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:47Z","lastTransitionTime":"2025-10-08T07:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.946711 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.946764 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.946785 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.946804 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:47 crc kubenswrapper[4693]: I1008 07:17:47.946840 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:47Z","lastTransitionTime":"2025-10-08T07:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.050071 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.050125 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.050139 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.050162 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.050177 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:48Z","lastTransitionTime":"2025-10-08T07:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.153041 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.153109 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.153121 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.153139 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.153149 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:48Z","lastTransitionTime":"2025-10-08T07:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.255652 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.255706 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.255718 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.255738 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.255751 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:48Z","lastTransitionTime":"2025-10-08T07:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.358583 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.358619 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.358631 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.358648 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.358661 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:48Z","lastTransitionTime":"2025-10-08T07:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.362495 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:48 crc kubenswrapper[4693]: E1008 07:17:48.362607 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.461974 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.462046 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.462064 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.462089 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.462107 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:48Z","lastTransitionTime":"2025-10-08T07:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.564931 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.564970 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.564979 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.564992 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.565003 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:48Z","lastTransitionTime":"2025-10-08T07:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.667962 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.668039 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.668058 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.668084 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.668105 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:48Z","lastTransitionTime":"2025-10-08T07:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.771892 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.771969 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.771980 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.772002 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.772014 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:48Z","lastTransitionTime":"2025-10-08T07:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.875301 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.875385 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.875416 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.875942 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.876026 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:48Z","lastTransitionTime":"2025-10-08T07:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.979134 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.979199 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.979235 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.979255 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:48 crc kubenswrapper[4693]: I1008 07:17:48.979268 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:48Z","lastTransitionTime":"2025-10-08T07:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.082557 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.082615 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.082632 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.082657 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.082674 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:49Z","lastTransitionTime":"2025-10-08T07:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.185978 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.186019 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.186028 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.186044 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.186057 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:49Z","lastTransitionTime":"2025-10-08T07:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.288375 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.288463 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.288483 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.288508 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.288525 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:49Z","lastTransitionTime":"2025-10-08T07:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.361942 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.361952 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:49 crc kubenswrapper[4693]: E1008 07:17:49.362179 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.361990 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:49 crc kubenswrapper[4693]: E1008 07:17:49.362248 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:49 crc kubenswrapper[4693]: E1008 07:17:49.362377 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.391286 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.391373 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.391398 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.391431 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.391457 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:49Z","lastTransitionTime":"2025-10-08T07:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.494633 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.494707 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.494725 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.494757 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.494779 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:49Z","lastTransitionTime":"2025-10-08T07:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.597508 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.597590 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.597602 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.597618 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.597630 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:49Z","lastTransitionTime":"2025-10-08T07:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.701228 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.701287 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.701305 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.701334 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.701354 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:49Z","lastTransitionTime":"2025-10-08T07:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.803565 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.804111 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.804200 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.804319 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.804495 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:49Z","lastTransitionTime":"2025-10-08T07:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.907591 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.908028 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.908129 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.908239 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:49 crc kubenswrapper[4693]: I1008 07:17:49.908335 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:49Z","lastTransitionTime":"2025-10-08T07:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.012263 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.012367 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.012392 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.012419 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.012440 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:50Z","lastTransitionTime":"2025-10-08T07:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.115623 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.115685 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.115720 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.115744 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.115759 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:50Z","lastTransitionTime":"2025-10-08T07:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.219666 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.219740 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.219765 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.219793 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.219810 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:50Z","lastTransitionTime":"2025-10-08T07:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.323585 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.323634 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.323645 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.323664 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.323675 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:50Z","lastTransitionTime":"2025-10-08T07:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.361845 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:50 crc kubenswrapper[4693]: E1008 07:17:50.362101 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.427112 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.427228 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.427257 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.427293 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.427323 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:50Z","lastTransitionTime":"2025-10-08T07:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.531579 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.531628 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.531640 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.531660 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.531674 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:50Z","lastTransitionTime":"2025-10-08T07:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.634864 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.634900 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.634908 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.635177 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.635204 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:50Z","lastTransitionTime":"2025-10-08T07:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.738790 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.738871 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.738891 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.738917 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.738934 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:50Z","lastTransitionTime":"2025-10-08T07:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.842662 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.842741 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.842761 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.843488 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.843513 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:50Z","lastTransitionTime":"2025-10-08T07:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.947450 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.947519 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.947539 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.947568 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:50 crc kubenswrapper[4693]: I1008 07:17:50.947588 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:50Z","lastTransitionTime":"2025-10-08T07:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.050953 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.051016 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.051029 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.051052 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.051064 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.155201 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.155530 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.155648 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.155756 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.155893 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.259574 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.259634 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.259655 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.259681 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.259700 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.362559 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.362726 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.362598 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:51 crc kubenswrapper[4693]: E1008 07:17:51.363062 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.363096 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.363201 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.363233 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.363304 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.363334 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:51 crc kubenswrapper[4693]: E1008 07:17:51.363538 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:51 crc kubenswrapper[4693]: E1008 07:17:51.363738 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.466746 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.466874 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.466897 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.466925 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.466943 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.573678 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.573740 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.573758 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.573785 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.573806 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.598551 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.598624 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.598642 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.598670 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.598689 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:51 crc kubenswrapper[4693]: E1008 07:17:51.614661 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:51Z is after 2025-08-24T17:21:41Z"
Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.620682 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.620739 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.620758 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.620786 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.620804 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:51 crc kubenswrapper[4693]: E1008 07:17:51.639327 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:51Z is after 2025-08-24T17:21:41Z"
Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.643868 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.643913 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.643965 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.644067 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.644090 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 08 07:17:51 crc kubenswrapper[4693]: E1008 07:17:51.661676 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:51Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.667613 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.667679 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.667698 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.667731 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.667751 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:51 crc kubenswrapper[4693]: E1008 07:17:51.688467 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:51Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.693708 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.693767 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.693777 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.693842 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.693860 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:51 crc kubenswrapper[4693]: E1008 07:17:51.713195 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:51Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:51 crc kubenswrapper[4693]: E1008 07:17:51.713452 4693 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.715874 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.715933 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.715954 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.715983 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.716004 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.820047 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.820113 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.820133 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.820162 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.820181 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.938960 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.939077 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.939100 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.939125 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:51 crc kubenswrapper[4693]: I1008 07:17:51.939139 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:51Z","lastTransitionTime":"2025-10-08T07:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.042983 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.043033 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.043046 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.043065 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.043077 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:52Z","lastTransitionTime":"2025-10-08T07:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.148318 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.148398 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.148420 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.148476 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.148499 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:52Z","lastTransitionTime":"2025-10-08T07:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.252279 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.252372 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.252398 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.252434 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.252460 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:52Z","lastTransitionTime":"2025-10-08T07:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.359013 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.359075 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.359093 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.359117 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.359136 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:52Z","lastTransitionTime":"2025-10-08T07:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.361911 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:52 crc kubenswrapper[4693]: E1008 07:17:52.362134 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.461865 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.461982 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.462020 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.462060 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.462085 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:52Z","lastTransitionTime":"2025-10-08T07:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.565754 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.565951 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.565976 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.566006 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.566032 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:52Z","lastTransitionTime":"2025-10-08T07:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.670031 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.670083 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.670099 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.670122 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.670137 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:52Z","lastTransitionTime":"2025-10-08T07:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.773930 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.774009 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.774033 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.774073 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.774100 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:52Z","lastTransitionTime":"2025-10-08T07:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.877287 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.877373 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.877392 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.877419 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.877437 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:52Z","lastTransitionTime":"2025-10-08T07:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.981390 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.981455 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.981468 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.981489 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:52 crc kubenswrapper[4693]: I1008 07:17:52.981505 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:52Z","lastTransitionTime":"2025-10-08T07:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.084341 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.084420 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.084448 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.084474 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.084490 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:53Z","lastTransitionTime":"2025-10-08T07:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.188365 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.188458 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.188485 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.188522 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.188541 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:53Z","lastTransitionTime":"2025-10-08T07:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.291757 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.291859 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.291881 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.291910 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.291931 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:53Z","lastTransitionTime":"2025-10-08T07:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.362035 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.362071 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.362157 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:53 crc kubenswrapper[4693]: E1008 07:17:53.362278 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:53 crc kubenswrapper[4693]: E1008 07:17:53.362375 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:53 crc kubenswrapper[4693]: E1008 07:17:53.362472 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.394517 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.394584 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.394604 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.394627 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.394646 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:53Z","lastTransitionTime":"2025-10-08T07:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.497687 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.497761 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.497781 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.497847 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.497868 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:53Z","lastTransitionTime":"2025-10-08T07:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.600760 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.600854 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.600873 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.600901 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.600925 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:53Z","lastTransitionTime":"2025-10-08T07:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.704704 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.704749 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.704762 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.704781 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.704792 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:53Z","lastTransitionTime":"2025-10-08T07:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.806157 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.806209 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.806217 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.806234 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.806243 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:53Z","lastTransitionTime":"2025-10-08T07:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.909708 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.909777 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.909794 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.909854 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:53 crc kubenswrapper[4693]: I1008 07:17:53.909874 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:53Z","lastTransitionTime":"2025-10-08T07:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.013792 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.013860 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.013869 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.013887 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.013899 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:54Z","lastTransitionTime":"2025-10-08T07:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.116533 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.116604 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.116621 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.116650 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.116674 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:54Z","lastTransitionTime":"2025-10-08T07:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.219443 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.219540 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.219564 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.219597 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.219628 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:54Z","lastTransitionTime":"2025-10-08T07:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.322691 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.322751 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.322769 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.322796 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.322849 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:54Z","lastTransitionTime":"2025-10-08T07:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.362209 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:17:54 crc kubenswrapper[4693]: E1008 07:17:54.362367 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.426077 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.426148 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.426169 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.426198 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.426218 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:54Z","lastTransitionTime":"2025-10-08T07:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.529628 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.529695 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.529713 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.529741 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.529759 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:54Z","lastTransitionTime":"2025-10-08T07:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.632316 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.632387 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.632410 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.632442 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.632466 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:54Z","lastTransitionTime":"2025-10-08T07:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.736022 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.736628 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.736655 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.736716 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.736937 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:54Z","lastTransitionTime":"2025-10-08T07:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.839735 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.839803 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.839870 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.839903 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.839924 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:54Z","lastTransitionTime":"2025-10-08T07:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.943156 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.943267 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.943340 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.943387 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:54 crc kubenswrapper[4693]: I1008 07:17:54.943417 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:54Z","lastTransitionTime":"2025-10-08T07:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.047590 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.047729 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.047745 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.047836 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.047881 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:55Z","lastTransitionTime":"2025-10-08T07:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.151365 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.151431 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.151448 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.151473 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.151490 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:55Z","lastTransitionTime":"2025-10-08T07:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.254498 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.254584 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.254603 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.254640 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.254661 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:55Z","lastTransitionTime":"2025-10-08T07:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.357634 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.357692 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.357706 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.357728 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.357741 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:55Z","lastTransitionTime":"2025-10-08T07:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.362136 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.362147 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.362165 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:17:55 crc kubenswrapper[4693]: E1008 07:17:55.362581 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:55 crc kubenswrapper[4693]: E1008 07:17:55.362722 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:17:55 crc kubenswrapper[4693]: E1008 07:17:55.363073 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.384041 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.408497 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:68
7fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.441602 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:44Z\\\",\\\"message\\\":\\\"V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1008 07:17:44.391870 6302 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1008 07:17:44.391862 6302 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/community-operators]} name:Service_openshift-marketplace/community-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.189:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d389393c-7ba9-422c-b3f5-06e391d537d2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:44.391894 6302 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nF1008 07:17:44.391901 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.460397 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.460468 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.460486 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.460557 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.460576 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:55Z","lastTransitionTime":"2025-10-08T07:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.464463 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.481769 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.497371 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.513783 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.527645 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.541644 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.561370 4693 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8
790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.564254 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.564492 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.564654 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.564706 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.564779 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:55Z","lastTransitionTime":"2025-10-08T07:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.582884 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.597746 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.618607 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.643116 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.659263 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.668030 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.668106 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.668120 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.668146 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.668159 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:55Z","lastTransitionTime":"2025-10-08T07:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.674434 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.691128 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:
29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:17:55Z is after 2025-08-24T17:21:41Z" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.771007 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.771084 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.771103 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.771130 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.771148 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:55Z","lastTransitionTime":"2025-10-08T07:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.874758 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.875514 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.875602 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.875692 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:55 crc kubenswrapper[4693]: I1008 07:17:55.875772 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:55Z","lastTransitionTime":"2025-10-08T07:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 08 07:17:56 crc kubenswrapper[4693]: I1008 07:17:56.362026 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv"
Oct 08 07:17:56 crc kubenswrapper[4693]: E1008 07:17:56.362448 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada"
Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.363105 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.363147 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 08 07:17:57 crc kubenswrapper[4693]: E1008 07:17:57.363294 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 08 07:17:57 crc kubenswrapper[4693]: E1008 07:17:57.363456 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.363975 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 08 07:17:57 crc kubenswrapper[4693]: E1008 07:17:57.364142 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
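Every sync failure above reduces to the same condition: the container runtime reports NetworkReady=false because no CNI configuration file exists in /etc/kubernetes/cni/net.d/ yet. A quick check of that directory, as a sketch (the path comes from the log message; the network provider expected to write the config here is OVN-Kubernetes):

```python
# Mirror the kubelet's complaint: NetworkReady stays false until the
# network provider drops a CNI config into this directory.
import os

CNI_CONF_DIR = "/etc/kubernetes/cni/net.d/"  # path from the log message

try:
    entries = sorted(os.listdir(CNI_CONF_DIR))
except FileNotFoundError:
    entries = []

# Standard CNI config file extensions recognized by the runtime.
confs = [e for e in entries if e.endswith((".conf", ".conflist", ".json"))]
print("CNI configs:", confs or "none yet - waiting on the network provider")
```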
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.364533 4693 scope.go:117] "RemoveContainer" containerID="884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107" Oct 08 07:17:57 crc kubenswrapper[4693]: E1008 07:17:57.365008 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\"" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.437250 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.437312 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.437329 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.437353 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.437371 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:57Z","lastTransitionTime":"2025-10-08T07:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.540798 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.540897 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.540915 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.540946 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:17:57 crc kubenswrapper[4693]: I1008 07:17:57.540964 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:17:57Z","lastTransitionTime":"2025-10-08T07:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 08 07:17:58 crc kubenswrapper[4693]: I1008 07:17:58.362045 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv"
Oct 08 07:17:58 crc kubenswrapper[4693]: E1008 07:17:58.362289 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada"
Oct 08 07:17:59 crc kubenswrapper[4693]: I1008 07:17:59.362962 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 08 07:17:59 crc kubenswrapper[4693]: I1008 07:17:59.363099 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 08 07:17:59 crc kubenswrapper[4693]: E1008 07:17:59.363138 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 08 07:17:59 crc kubenswrapper[4693]: I1008 07:17:59.363122 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 08 07:17:59 crc kubenswrapper[4693]: E1008 07:17:59.363284 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 08 07:17:59 crc kubenswrapper[4693]: E1008 07:17:59.363388 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Has your network provider started?"} Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.362773 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:00 crc kubenswrapper[4693]: E1008 07:18:00.362901 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.434795 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.434874 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.434884 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.434905 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.434928 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:00Z","lastTransitionTime":"2025-10-08T07:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.537225 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.537272 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.537284 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.537304 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.537316 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:00Z","lastTransitionTime":"2025-10-08T07:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.640473 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.640522 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.640532 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.640548 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.640559 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:00Z","lastTransitionTime":"2025-10-08T07:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.743508 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.743564 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.743574 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.743596 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.743610 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:00Z","lastTransitionTime":"2025-10-08T07:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.846651 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.846742 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.846764 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.846794 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.846843 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:00Z","lastTransitionTime":"2025-10-08T07:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.949272 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.949351 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.949370 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.949402 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:00 crc kubenswrapper[4693]: I1008 07:18:00.949423 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:00Z","lastTransitionTime":"2025-10-08T07:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.053216 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.053291 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.053306 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.053328 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.053342 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:01Z","lastTransitionTime":"2025-10-08T07:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.156004 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.156068 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.156079 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.156098 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.156111 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:01Z","lastTransitionTime":"2025-10-08T07:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.259000 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.259052 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.259063 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.259084 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.259094 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:01Z","lastTransitionTime":"2025-10-08T07:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.362252 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.362290 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.362271 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.362301 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.362347 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.362367 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.362378 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:01Z","lastTransitionTime":"2025-10-08T07:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.362452 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:01 crc kubenswrapper[4693]: E1008 07:18:01.362680 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:01 crc kubenswrapper[4693]: E1008 07:18:01.362741 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:01 crc kubenswrapper[4693]: E1008 07:18:01.362869 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.466140 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.466190 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.466202 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.466223 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.466238 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:01Z","lastTransitionTime":"2025-10-08T07:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.569846 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.569919 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.569940 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.569969 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.569990 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:01Z","lastTransitionTime":"2025-10-08T07:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.673487 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.673545 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.673562 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.673581 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.673595 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:01Z","lastTransitionTime":"2025-10-08T07:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.777729 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.778358 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.780023 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.780101 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.780117 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:01Z","lastTransitionTime":"2025-10-08T07:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.883479 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.883543 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.883553 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.883570 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.883586 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:01Z","lastTransitionTime":"2025-10-08T07:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.992736 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.992786 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.992798 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.992845 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:01 crc kubenswrapper[4693]: I1008 07:18:01.992859 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:01Z","lastTransitionTime":"2025-10-08T07:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.047839 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.047889 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.047899 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.047916 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.047927 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: E1008 07:18:02.065016 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:02Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.069942 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.069991 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.070002 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.070026 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.070037 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: E1008 07:18:02.082247 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:02Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.086060 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.086091 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.086101 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.086112 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.086122 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: E1008 07:18:02.096429 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:02Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.100644 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.100792 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.100914 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.101015 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.101113 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: E1008 07:18:02.114469 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:02Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.118185 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.118218 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
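Note on the failure above: the patch payload never changes between attempts; what fails is the TLS handshake with the node.network-node-identity.openshift.io webhook, whose serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2025-10-08. A minimal Go sketch, assuming access to the node and that the webhook on 127.0.0.1:9743 is still listening, that prints the presented certificate's validity window without trusting it (the same check could be done with openssl s_client):

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Dial the endpoint the kubelet's patch attempts fail against.
	// Verification is skipped deliberately: the goal is to read the
	// expired certificate, not to trust it.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Println("subject:  ", cert.Subject)
	fmt.Println("notBefore:", cert.NotBefore.Format(time.RFC3339))
	fmt.Println("notAfter: ", cert.NotAfter.Format(time.RFC3339))
	fmt.Println("expired:  ", time.Now().After(cert.NotAfter))
}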
event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.118227 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.118246 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.118259 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: E1008 07:18:02.130092 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:02Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:02 crc kubenswrapper[4693]: E1008 07:18:02.130243 4693 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.132157 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
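The "exceeds retry count" record shows the shape of the kubelet's status loop: a handful of immediate retries per sync tick, then it gives up until the next tick. A sketch of that bounded-retry pattern, a stand-in rather than the kubelet's actual code (upstream keys the attempt count off a small constant, nodeStatusUpdateRetry):

package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry mirrors the kubelet's bounded retry; treat the
// value as illustrative here.
const nodeStatusUpdateRetry = 5

// tryUpdateNodeStatus is a stand-in for the real patch call, which in
// this log always fails because the webhook's certificate has expired.
func tryUpdateNodeStatus() error {
	return errors.New("failed calling webhook: x509: certificate has expired")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdateNodeStatus(); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		// Matches the E1008 07:18:02.130243 record above.
		fmt.Println("Unable to update node status:", err)
	}
}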
event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.132190 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.132200 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.132218 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.132234 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.234841 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.234890 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.234903 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.234921 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.234934 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.338533 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.338591 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.338601 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.338621 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.338634 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.362412 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:02 crc kubenswrapper[4693]: E1008 07:18:02.362871 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.396526 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:02 crc kubenswrapper[4693]: E1008 07:18:02.396801 4693 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:18:02 crc kubenswrapper[4693]: E1008 07:18:02.396954 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs podName:0f68f540-8d3f-4081-8c7e-cd5023991ada nodeName:}" failed. No retries permitted until 2025-10-08 07:18:34.396925494 +0000 UTC m=+99.767890629 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs") pod "network-metrics-daemon-b2lbv" (UID: "0f68f540-8d3f-4081-8c7e-cd5023991ada") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.441506 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.441903 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.442047 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.442185 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.442336 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.546283 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.546364 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.546382 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.546411 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.546431 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.648946 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.648979 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.648988 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.649004 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.649015 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.751438 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.751466 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.751476 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.751491 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.751502 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.855001 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.855061 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.855077 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.855101 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.855118 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.957538 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.957595 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.957607 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.957632 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:02 crc kubenswrapper[4693]: I1008 07:18:02.957646 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:02Z","lastTransitionTime":"2025-10-08T07:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.059953 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.060001 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.060011 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.060028 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.060040 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:03Z","lastTransitionTime":"2025-10-08T07:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.162162 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.162222 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.162234 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.162258 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.162271 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:03Z","lastTransitionTime":"2025-10-08T07:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.265099 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.265179 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.265208 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.265237 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.265256 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:03Z","lastTransitionTime":"2025-10-08T07:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.362874 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.362896 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.362914 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:03 crc kubenswrapper[4693]: E1008 07:18:03.363109 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
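Every NotReady condition and pod-sync failure in this stretch carries one root cause: there is no CNI configuration file under /etc/kubernetes/cni/net.d/, so the runtime keeps reporting NetworkPluginNotReady and no pod sandbox can be created until the network provider (OVN-Kubernetes here) writes its config. A small Go sketch of the equivalent check, assuming it runs on the node; the extension filter is an assumption about what counts as a CNI config file:

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	// The directory named in the kubelet's NetworkPluginNotReady message.
	const cniConfDir = "/etc/kubernetes/cni/net.d"

	entries, err := os.ReadDir(cniConfDir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	var found []string
	for _, e := range entries {
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file in", cniConfDir, "- network plugin not ready")
		return
	}
	fmt.Println("CNI configs present:", found)
}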
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:03 crc kubenswrapper[4693]: E1008 07:18:03.363199 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:03 crc kubenswrapper[4693]: E1008 07:18:03.363380 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.368010 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.368045 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.368058 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.368073 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.368087 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:03Z","lastTransitionTime":"2025-10-08T07:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.471346 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.471424 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.471447 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.471477 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.471496 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:03Z","lastTransitionTime":"2025-10-08T07:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.575593 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.575663 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.575674 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.575691 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.575703 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:03Z","lastTransitionTime":"2025-10-08T07:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.679221 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.679277 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.679295 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.679320 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.679336 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:03Z","lastTransitionTime":"2025-10-08T07:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.784331 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.784382 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.784395 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.784413 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.784428 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:03Z","lastTransitionTime":"2025-10-08T07:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.842857 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bfhs8_8ddc214e-6569-4b0e-8783-f484a001ce6a/kube-multus/0.log" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.842910 4693 generic.go:334] "Generic (PLEG): container finished" podID="8ddc214e-6569-4b0e-8783-f484a001ce6a" containerID="0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431" exitCode=1 Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.842945 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bfhs8" event={"ID":"8ddc214e-6569-4b0e-8783-f484a001ce6a","Type":"ContainerDied","Data":"0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431"} Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.843335 4693 scope.go:117] "RemoveContainer" containerID="0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.867575 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:03Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.883837 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:03Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.886437 4693 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.886465 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.886477 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.886493 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.886504 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:03Z","lastTransitionTime":"2025-10-08T07:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.903165 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:03Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.921199 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380
066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:03Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.938842 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:03Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.954514 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:03Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.978713 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:44Z\\\",\\\"message\\\":\\\"V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1008 07:17:44.391870 6302 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1008 07:17:44.391862 6302 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/community-operators]} name:Service_openshift-marketplace/community-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.189:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d389393c-7ba9-422c-b3f5-06e391d537d2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:44.391894 6302 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nF1008 07:17:44.391901 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:03Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.989641 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.989671 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.989680 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.989696 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.989706 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:03Z","lastTransitionTime":"2025-10-08T07:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:03 crc kubenswrapper[4693]: I1008 07:18:03.992584 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:03Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.005914 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.019493 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.033761 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.045486 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.060087 4693 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.075339 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.091099 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.092947 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.093205 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.093480 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.093682 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.093921 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:04Z","lastTransitionTime":"2025-10-08T07:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.105240 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:18:03Z\\\",\\\"message\\\":\\\"2025-10-08T07:17:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54\\\\n2025-10-08T07:17:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54 to /host/opt/cni/bin/\\\\n2025-10-08T07:17:17Z [verbose] multus-daemon started\\\\n2025-10-08T07:17:17Z [verbose] Readiness Indicator file check\\\\n2025-10-08T07:18:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.118145 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.197611 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.197685 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.197710 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.197741 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.197762 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:04Z","lastTransitionTime":"2025-10-08T07:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.301756 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.301824 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.301840 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.301858 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.301870 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:04Z","lastTransitionTime":"2025-10-08T07:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.362521 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:04 crc kubenswrapper[4693]: E1008 07:18:04.362674 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.404927 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.404964 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.404973 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.404988 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.404997 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:04Z","lastTransitionTime":"2025-10-08T07:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.508428 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.508521 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.508548 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.508575 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.508596 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:04Z","lastTransitionTime":"2025-10-08T07:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.612112 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.612169 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.612185 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.612212 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.612230 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:04Z","lastTransitionTime":"2025-10-08T07:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.716036 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.716099 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.716119 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.716150 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.716171 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:04Z","lastTransitionTime":"2025-10-08T07:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.820028 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.820099 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.820119 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.820152 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.820176 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:04Z","lastTransitionTime":"2025-10-08T07:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.850578 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bfhs8_8ddc214e-6569-4b0e-8783-f484a001ce6a/kube-multus/0.log" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.850681 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bfhs8" event={"ID":"8ddc214e-6569-4b0e-8783-f484a001ce6a","Type":"ContainerStarted","Data":"07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a"} Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.875942 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.893724 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.907176 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.923014 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.923046 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.923056 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.923072 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.923084 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:04Z","lastTransitionTime":"2025-10-08T07:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.928778 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.942438 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.959259 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 
2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.976048 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:04 crc kubenswrapper[4693]: I1008 07:18:04.991320 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:18:03Z\\\",\\\"message\\\":\\\"2025-10-08T07:17:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54\\\\n2025-10-08T07:17:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54 to /host/opt/cni/bin/\\\\n2025-10-08T07:17:17Z [verbose] multus-daemon started\\\\n2025-10-08T07:17:17Z [verbose] Readiness Indicator file check\\\\n2025-10-08T07:18:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:18:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:04Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.006879 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.024993 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 
07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 
07:18:05.025555 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.025614 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.025624 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.025643 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.025656 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:05Z","lastTransitionTime":"2025-10-08T07:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.042637 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.062456 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.075687 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e44
0c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.088662 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.104411 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.128761 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.128795 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:05 crc 
kubenswrapper[4693]: I1008 07:18:05.128806 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.128850 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.128861 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:05Z","lastTransitionTime":"2025-10-08T07:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.129083 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622
cf6fc4be1d763957d1060107\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:44Z\\\",\\\"message\\\":\\\"V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1008 07:17:44.391870 6302 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1008 07:17:44.391862 6302 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/community-operators]} name:Service_openshift-marketplace/community-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.189:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d389393c-7ba9-422c-b3f5-06e391d537d2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:44.391894 6302 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nF1008 07:17:44.391901 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.146109 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.231248 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.231305 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.231318 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.231340 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.231355 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:05Z","lastTransitionTime":"2025-10-08T07:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.333670 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.333722 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.333733 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.333752 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.333764 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:05Z","lastTransitionTime":"2025-10-08T07:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.362947 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.363042 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:05 crc kubenswrapper[4693]: E1008 07:18:05.363124 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.363059 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:05 crc kubenswrapper[4693]: E1008 07:18:05.363229 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:05 crc kubenswrapper[4693]: E1008 07:18:05.363384 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.379971 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.396024 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.409920 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.423687 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.435461 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.435503 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.435515 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.435536 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.435549 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:05Z","lastTransitionTime":"2025-10-08T07:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.443265 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.458961 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.474862 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:18:03Z\\\",\\\"message\\\":\\\"2025-10-08T07:17:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54\\\\n2025-10-08T07:17:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54 to /host/opt/cni/bin/\\\\n2025-10-08T07:17:17Z [verbose] multus-daemon started\\\\n2025-10-08T07:17:17Z [verbose] Readiness Indicator file check\\\\n2025-10-08T07:18:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:18:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.493615 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.511374 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 
07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 
07:18:05.526508 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.538075 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.538115 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.538129 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.538149 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.538161 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:05Z","lastTransitionTime":"2025-10-08T07:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.540449 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.556424 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.576287 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.590899 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.611305 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:44Z\\\",\\\"message\\\":\\\"V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1008 07:17:44.391870 6302 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1008 07:17:44.391862 6302 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/community-operators]} name:Service_openshift-marketplace/community-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.189:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d389393c-7ba9-422c-b3f5-06e391d537d2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:44.391894 6302 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nF1008 07:17:44.391901 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.623440 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.639514 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acc
ess-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"re
ason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-08T07:18:05Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.641475 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.641517 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.641527 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.641543 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.641555 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:05Z","lastTransitionTime":"2025-10-08T07:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.745148 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.745212 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.745228 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.745250 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.745268 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:05Z","lastTransitionTime":"2025-10-08T07:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.848472 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.848533 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.848547 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.848570 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.848586 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:05Z","lastTransitionTime":"2025-10-08T07:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.951625 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.951683 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.951698 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.951718 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:05 crc kubenswrapper[4693]: I1008 07:18:05.951736 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:05Z","lastTransitionTime":"2025-10-08T07:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.054603 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.054653 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.054666 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.054683 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.054694 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:06Z","lastTransitionTime":"2025-10-08T07:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.157932 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.157981 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.157992 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.158014 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.158025 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:06Z","lastTransitionTime":"2025-10-08T07:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.261167 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.261237 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.261271 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.261292 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.261303 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:06Z","lastTransitionTime":"2025-10-08T07:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.362437 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:06 crc kubenswrapper[4693]: E1008 07:18:06.362617 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.364528 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.364603 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.364621 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.364647 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.364664 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:06Z","lastTransitionTime":"2025-10-08T07:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.468239 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.468288 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.468298 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.468316 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.468327 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:06Z","lastTransitionTime":"2025-10-08T07:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.571546 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.571600 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.571612 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.571633 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.571651 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:06Z","lastTransitionTime":"2025-10-08T07:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.674863 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.674908 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.674919 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.674937 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.674954 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:06Z","lastTransitionTime":"2025-10-08T07:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.778311 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.778352 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.778363 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.778381 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.778390 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:06Z","lastTransitionTime":"2025-10-08T07:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.880743 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.880812 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.880843 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.880866 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.880882 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:06Z","lastTransitionTime":"2025-10-08T07:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.984576 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.984620 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.984629 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.984650 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:06 crc kubenswrapper[4693]: I1008 07:18:06.984661 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:06Z","lastTransitionTime":"2025-10-08T07:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.087377 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.087425 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.087438 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.087458 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.087470 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:07Z","lastTransitionTime":"2025-10-08T07:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.190238 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.190304 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.190317 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.190339 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.190355 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:07Z","lastTransitionTime":"2025-10-08T07:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.292566 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.292628 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.292639 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.292659 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.292671 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:07Z","lastTransitionTime":"2025-10-08T07:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.362190 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.362244 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.362340 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:07 crc kubenswrapper[4693]: E1008 07:18:07.362358 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:07 crc kubenswrapper[4693]: E1008 07:18:07.362499 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:07 crc kubenswrapper[4693]: E1008 07:18:07.362601 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.395528 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.395590 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.395609 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.395635 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.395656 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:07Z","lastTransitionTime":"2025-10-08T07:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.498018 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.498083 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.498100 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.498125 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.498144 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:07Z","lastTransitionTime":"2025-10-08T07:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.601946 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.601998 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.602022 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.602041 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.602053 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:07Z","lastTransitionTime":"2025-10-08T07:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.704184 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.704257 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.704279 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.704307 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.704329 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:07Z","lastTransitionTime":"2025-10-08T07:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.807157 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.807201 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.807211 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.807229 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.807240 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:07Z","lastTransitionTime":"2025-10-08T07:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.909654 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.909702 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.909716 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.909737 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:07 crc kubenswrapper[4693]: I1008 07:18:07.909750 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:07Z","lastTransitionTime":"2025-10-08T07:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.013109 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.013180 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.013198 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.013232 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.013256 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:08Z","lastTransitionTime":"2025-10-08T07:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.116999 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.117090 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.117109 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.117140 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.117159 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:08Z","lastTransitionTime":"2025-10-08T07:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.221256 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.221323 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.221342 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.221376 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.221397 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:08Z","lastTransitionTime":"2025-10-08T07:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.323677 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.323749 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.323769 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.323806 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.323869 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:08Z","lastTransitionTime":"2025-10-08T07:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.362029 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:08 crc kubenswrapper[4693]: E1008 07:18:08.362280 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.427156 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.427216 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.427229 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.427253 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.427268 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:08Z","lastTransitionTime":"2025-10-08T07:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.531514 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.531584 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.531603 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.531633 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.531658 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:08Z","lastTransitionTime":"2025-10-08T07:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.635064 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.635181 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.635207 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.635241 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.635267 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:08Z","lastTransitionTime":"2025-10-08T07:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.738478 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.738539 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.738556 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.738583 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.738600 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:08Z","lastTransitionTime":"2025-10-08T07:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.841089 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.841149 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.841165 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.841191 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.841212 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:08Z","lastTransitionTime":"2025-10-08T07:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.943235 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.943306 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.943324 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.943349 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:08 crc kubenswrapper[4693]: I1008 07:18:08.943366 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:08Z","lastTransitionTime":"2025-10-08T07:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.045310 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.046737 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.046842 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.046967 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.047299 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:09Z","lastTransitionTime":"2025-10-08T07:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.150144 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.151220 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.151439 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.151654 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.151895 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:09Z","lastTransitionTime":"2025-10-08T07:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.255478 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.255780 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.256037 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.256847 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.257225 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:09Z","lastTransitionTime":"2025-10-08T07:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.360754 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.360812 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.360841 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.360863 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.360877 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:09Z","lastTransitionTime":"2025-10-08T07:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.362482 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.362495 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.362808 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:09 crc kubenswrapper[4693]: E1008 07:18:09.362948 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:09 crc kubenswrapper[4693]: E1008 07:18:09.363022 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:09 crc kubenswrapper[4693]: E1008 07:18:09.363115 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.363197 4693 scope.go:117] "RemoveContainer" containerID="884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.463401 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.463480 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.463508 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.463594 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.463625 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:09Z","lastTransitionTime":"2025-10-08T07:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.566974 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.567020 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.567035 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.567089 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.567107 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:09Z","lastTransitionTime":"2025-10-08T07:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.670780 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.670908 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.670935 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.670967 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.670988 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:09Z","lastTransitionTime":"2025-10-08T07:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.774163 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.774239 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.774254 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.774282 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.774305 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:09Z","lastTransitionTime":"2025-10-08T07:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.871143 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/2.log" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.874788 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc"} Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.875424 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.876159 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.876208 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.876220 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.876239 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.876281 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:09Z","lastTransitionTime":"2025-10-08T07:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.893277 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:09Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.913372 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:09Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.927921 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:18:03Z\\\",\\\"message\\\":\\\"2025-10-08T07:17:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to 
/host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54\\\\n2025-10-08T07:17:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54 to /host/opt/cni/bin/\\\\n2025-10-08T07:17:17Z [verbose] multus-daemon started\\\\n2025-10-08T07:17:17Z [verbose] Readiness Indicator file check\\\\n2025-10-08T07:18:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:18:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:09Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.941651 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:09Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.957870 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:09Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.970041 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:09Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.978748 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.978779 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.978791 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.978815 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.978901 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:09Z","lastTransitionTime":"2025-10-08T07:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.981764 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:09Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:09 crc kubenswrapper[4693]: I1008 07:18:09.995191 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:09Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.006036 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 
07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.020168 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.041263 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\
":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:44Z\\\",\\\"message\\\":\\\"V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1008 07:17:44.391870 6302 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1008 07:17:44.391862 6302 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/community-operators]} name:Service_openshift-marketplace/community-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.189:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d389393c-7ba9-422c-b3f5-06e391d537d2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:44.391894 6302 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nF1008 07:17:44.391901 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.056901 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.078259 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.081455 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.081523 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.081542 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.081570 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.081590 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:10Z","lastTransitionTime":"2025-10-08T07:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.099624 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.113862 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.128225 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.139437 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.183691 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.183735 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.183750 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.183770 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.183783 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:10Z","lastTransitionTime":"2025-10-08T07:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.286119 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.286175 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.286195 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.286220 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.286238 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:10Z","lastTransitionTime":"2025-10-08T07:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.362598 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:10 crc kubenswrapper[4693]: E1008 07:18:10.362881 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.389085 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.389127 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.389140 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.389161 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.389173 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:10Z","lastTransitionTime":"2025-10-08T07:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.491923 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.491976 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.491989 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.492014 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.492028 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:10Z","lastTransitionTime":"2025-10-08T07:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.594786 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.595100 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.595172 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.595259 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.595375 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:10Z","lastTransitionTime":"2025-10-08T07:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.698149 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.698406 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.698492 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.698566 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.698630 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:10Z","lastTransitionTime":"2025-10-08T07:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.801966 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.802029 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.802047 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.802074 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.802096 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:10Z","lastTransitionTime":"2025-10-08T07:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.881402 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/3.log" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.882379 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/2.log" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.886461 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc" exitCode=1 Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.886526 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc"} Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.886589 4693 scope.go:117] "RemoveContainer" containerID="884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.888010 4693 scope.go:117] "RemoveContainer" containerID="96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc" Oct 08 07:18:10 crc kubenswrapper[4693]: E1008 07:18:10.888352 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\"" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.905697 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.905744 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.905764 4693 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.905789 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.905809 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:10Z","lastTransitionTime":"2025-10-08T07:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.912116 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.929512 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.958726 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://884b809f73bb25122c848e6529f36ca0a71a8622cf6fc4be1d763957d1060107\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:17:44Z\\\",\\\"message\\\":\\\"V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1008 07:17:44.391870 6302 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1008 07:17:44.391862 6302 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/community-operators]} name:Service_openshift-marketplace/community-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.189:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d389393c-7ba9-422c-b3f5-06e391d537d2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:17:44.391894 6302 services_controller.go:445] Built service openshift-network-console/networking-console-plugin LB template configs for network=default: []services.lbConfig(nil)\\\\nF1008 07:17:44.391901 6302 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:18:10Z\\\",\\\"message\\\":\\\"e:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} 
options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:18:10.346188 6652 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1008 07:18:10.346208 6652 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d
9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.973888 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:10 crc kubenswrapper[4693]: I1008 07:18:10.989555 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:10Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.005316 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.009173 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.009250 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.009275 4693 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.009312 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.009340 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:11Z","lastTransitionTime":"2025-10-08T07:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.024391 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.047432 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.061234 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.081053 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.097457 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.113168 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.113243 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.113267 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.113295 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.113321 4693 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:11Z","lastTransitionTime":"2025-10-08T07:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.116145 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.136456 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:18:03Z\\\",\\\"message\\\":\\\"2025-10-08T07:17:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54\\\\n2025-10-08T07:17:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54 to /host/opt/cni/bin/\\\\n2025-10-08T07:17:17Z [verbose] multus-daemon started\\\\n2025-10-08T07:17:17Z [verbose] Readiness Indicator file check\\\\n2025-10-08T07:18:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:18:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.153308 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.168107 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 
2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.181618 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.199137 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.216737 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.216788 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.216805 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.216861 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.216881 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:11Z","lastTransitionTime":"2025-10-08T07:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.320067 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.320119 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.320131 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.320149 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.320161 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:11Z","lastTransitionTime":"2025-10-08T07:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.362156 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.362175 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:11 crc kubenswrapper[4693]: E1008 07:18:11.362321 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.362342 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:11 crc kubenswrapper[4693]: E1008 07:18:11.362440 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:11 crc kubenswrapper[4693]: E1008 07:18:11.362775 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.423181 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.423533 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.423681 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.423794 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.423949 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:11Z","lastTransitionTime":"2025-10-08T07:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.527534 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.527604 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.527621 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.527647 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.527664 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:11Z","lastTransitionTime":"2025-10-08T07:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.631568 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.631630 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.631642 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.631667 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.631683 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:11Z","lastTransitionTime":"2025-10-08T07:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.734704 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.734766 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.734778 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.734798 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.734838 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:11Z","lastTransitionTime":"2025-10-08T07:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.837533 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.837872 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.837948 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.838049 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.838133 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:11Z","lastTransitionTime":"2025-10-08T07:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.893779 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/3.log" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.897783 4693 scope.go:117] "RemoveContainer" containerID="96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc" Oct 08 07:18:11 crc kubenswrapper[4693]: E1008 07:18:11.898281 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\"" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.913543 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.928921 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.941739 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.941804 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.941868 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.941898 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.941939 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:11Z","lastTransitionTime":"2025-10-08T07:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.941978 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.953949 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.963599 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.976369 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:11 crc kubenswrapper[4693]: I1008 07:18:11.990857 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:11Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.008430 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 
07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 
07:18:12.025217 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.042511 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:18:03Z\\\",\\\"message\\\":\\\"2025-10-08T07:17:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54\\\\n2025-10-08T07:17:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54 to /host/opt/cni/bin/\\\\n2025-10-08T07:17:17Z [verbose] multus-daemon started\\\\n2025-10-08T07:17:17Z [verbose] Readiness Indicator file check\\\\n2025-10-08T07:18:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:18:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.044098 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.044139 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.044150 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.044170 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.044184 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.057396 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.075417 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.090164 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.104974 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.119624 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.139700 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.144800 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.144879 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc 
kubenswrapper[4693]: I1008 07:18:12.144897 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.144922 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.144937 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.174690 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96af4ab28aff5f7a33dcb23fe168e895eae8dac2
11b7f428650b8dbd8134f0cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:18:10Z\\\",\\\"message\\\":\\\"e:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:18:10.346188 6652 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1008 07:18:10.346208 6652 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:18:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: E1008 07:18:12.183460 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.189918 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.189967 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.189978 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.189999 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.190011 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:12 crc kubenswrapper[4693]: E1008 07:18:12.206967 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.221785 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.221929 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.221944 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.221971 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.222007 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:12 crc kubenswrapper[4693]: E1008 07:18:12.237002 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.240905 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.241222 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.241385 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.241524 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.241649 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:12 crc kubenswrapper[4693]: E1008 07:18:12.282325 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.286483 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.286663 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.286787 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.286975 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.287107 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:12 crc kubenswrapper[4693]: E1008 07:18:12.299405 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:12Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:12 crc kubenswrapper[4693]: E1008 07:18:12.299948 4693 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.301436 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.301519 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.301534 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.301560 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.301576 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.362528 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:12 crc kubenswrapper[4693]: E1008 07:18:12.362909 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.376400 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.404082 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.404233 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.404306 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.404391 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.404459 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.507175 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.507576 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.507692 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.507784 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.507894 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.610744 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.610792 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.610807 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.610862 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.610880 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.713564 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.713837 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.713917 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.714012 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.714096 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.817898 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.817973 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.817993 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.818020 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.818041 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.921131 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.921193 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.921210 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.921237 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:12 crc kubenswrapper[4693]: I1008 07:18:12.921257 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:12Z","lastTransitionTime":"2025-10-08T07:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.025333 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.025400 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.025423 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.025451 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.025470 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:13Z","lastTransitionTime":"2025-10-08T07:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.128926 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.129000 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.129021 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.129053 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.129077 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:13Z","lastTransitionTime":"2025-10-08T07:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.232230 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.232276 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.232289 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.232309 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.232321 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:13Z","lastTransitionTime":"2025-10-08T07:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.335516 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.335557 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.335566 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.335583 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.335595 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:13Z","lastTransitionTime":"2025-10-08T07:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.361929 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.362035 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:13 crc kubenswrapper[4693]: E1008 07:18:13.362104 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.362140 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:13 crc kubenswrapper[4693]: E1008 07:18:13.362333 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:13 crc kubenswrapper[4693]: E1008 07:18:13.362440 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.439270 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.439353 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.439377 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.439408 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.439434 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:13Z","lastTransitionTime":"2025-10-08T07:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.543495 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.543567 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.543587 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.543615 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.543638 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:13Z","lastTransitionTime":"2025-10-08T07:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.646710 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.646776 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.646790 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.646809 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.646835 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:13Z","lastTransitionTime":"2025-10-08T07:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.750767 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.750884 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.750906 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.750947 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.750966 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:13Z","lastTransitionTime":"2025-10-08T07:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.854912 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.854998 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.855024 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.855058 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.855079 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:13Z","lastTransitionTime":"2025-10-08T07:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.958012 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.958092 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.958110 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.958137 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:13 crc kubenswrapper[4693]: I1008 07:18:13.958162 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:13Z","lastTransitionTime":"2025-10-08T07:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.069693 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.069753 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.069769 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.069789 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.069805 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:14Z","lastTransitionTime":"2025-10-08T07:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.173318 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.173408 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.173431 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.173470 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.173530 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:14Z","lastTransitionTime":"2025-10-08T07:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.277382 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.277416 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.277425 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.277442 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.277453 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:14Z","lastTransitionTime":"2025-10-08T07:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.361979 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:14 crc kubenswrapper[4693]: E1008 07:18:14.362125 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.379566 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.379603 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.379614 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.379628 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.379641 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:14Z","lastTransitionTime":"2025-10-08T07:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.482613 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.482682 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.482701 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.482731 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.482762 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:14Z","lastTransitionTime":"2025-10-08T07:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.585289 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.585360 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.585380 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.585408 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.585428 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:14Z","lastTransitionTime":"2025-10-08T07:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.688838 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.688910 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.688928 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.688955 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.688974 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:14Z","lastTransitionTime":"2025-10-08T07:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.795116 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.795614 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.795626 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.795646 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.795661 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:14Z","lastTransitionTime":"2025-10-08T07:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.899619 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.899689 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.899712 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.899742 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:14 crc kubenswrapper[4693]: I1008 07:18:14.899763 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:14Z","lastTransitionTime":"2025-10-08T07:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.003118 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.003185 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.003204 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.003229 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.003248 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:15Z","lastTransitionTime":"2025-10-08T07:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.106927 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.106998 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.107021 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.107048 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.107067 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:15Z","lastTransitionTime":"2025-10-08T07:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.209552 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.209611 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.209631 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.209663 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.209683 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:15Z","lastTransitionTime":"2025-10-08T07:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.313538 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.313589 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.313604 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.313625 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.313637 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:15Z","lastTransitionTime":"2025-10-08T07:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.362519 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.362610 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.362700 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:15 crc kubenswrapper[4693]: E1008 07:18:15.362872 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:15 crc kubenswrapper[4693]: E1008 07:18:15.362997 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:15 crc kubenswrapper[4693]: E1008 07:18:15.363130 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.384987 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e9116
99a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.403299 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.416509 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.416595 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.416614 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.416655 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.416674 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:15Z","lastTransitionTime":"2025-10-08T07:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.419431 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.450244 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.474479 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kub
ernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\
\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.501221 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20994
82919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:18:10Z\\\",\\\"message\\\":\\\"e:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:18:10.346188 6652 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1008 07:18:10.346208 6652 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: 
fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:18:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.515021 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82120147-a6b6-4dfa-b5c6-5a51359e9e47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ef29025877e988edc36110ef9e9ec8fa1fb612a9392b9f7b9cfa6b34d9f1dc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf1c6feb01dded41617cbf1e40114c9efa9cebf177f926365b1e2c1fd9bb7803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf1c6feb01dded41617cbf1e40114c9efa9cebf177f926365b1e2c1fd9bb7803\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.518988 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.519033 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.519045 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.519065 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.519079 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:15Z","lastTransitionTime":"2025-10-08T07:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.528415 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.541368 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.557631 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.574283 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.587699 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.604028 4693 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.622285 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.622711 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.622848 4693 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.622950 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.622364 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 
2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.623029 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:15Z","lastTransitionTime":"2025-10-08T07:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.641514 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.658033 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:18:03Z\\\",\\\"message\\\":\\\"2025-10-08T07:17:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54\\\\n2025-10-08T07:17:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54 to /host/opt/cni/bin/\\\\n2025-10-08T07:17:17Z [verbose] multus-daemon started\\\\n2025-10-08T07:17:17Z [verbose] Readiness Indicator file check\\\\n2025-10-08T07:18:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:18:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.674339 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.708245 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 
07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:15Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 
07:18:15.727337 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.727383 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.727396 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.727416 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.727428 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:15Z","lastTransitionTime":"2025-10-08T07:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.830356 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.830483 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.830509 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.830540 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.830557 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:15Z","lastTransitionTime":"2025-10-08T07:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.934212 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.934283 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.934298 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.934321 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:15 crc kubenswrapper[4693]: I1008 07:18:15.934338 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:15Z","lastTransitionTime":"2025-10-08T07:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.037987 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.038047 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.038059 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.038080 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.038092 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:16Z","lastTransitionTime":"2025-10-08T07:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.142369 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.142427 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.142447 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.142480 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.142501 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:16Z","lastTransitionTime":"2025-10-08T07:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.246730 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.246834 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.246851 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.246896 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.246915 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:16Z","lastTransitionTime":"2025-10-08T07:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.350335 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.351646 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.352060 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.352226 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.352371 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:16Z","lastTransitionTime":"2025-10-08T07:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.362087 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:16 crc kubenswrapper[4693]: E1008 07:18:16.362663 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.456586 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.456647 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.456663 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.456685 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.456700 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:16Z","lastTransitionTime":"2025-10-08T07:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.559653 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.559993 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.560084 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.560161 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.560236 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:16Z","lastTransitionTime":"2025-10-08T07:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.664247 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.664334 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.664354 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.664387 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.664409 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:16Z","lastTransitionTime":"2025-10-08T07:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.767865 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.767942 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.767965 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.767996 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.768016 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:16Z","lastTransitionTime":"2025-10-08T07:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.871742 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.872152 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.872279 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.872354 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.872419 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:16Z","lastTransitionTime":"2025-10-08T07:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.975960 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.976428 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.976509 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.976650 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:16 crc kubenswrapper[4693]: I1008 07:18:16.976733 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:16Z","lastTransitionTime":"2025-10-08T07:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.080545 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.080620 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.080638 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.080668 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.080685 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:17Z","lastTransitionTime":"2025-10-08T07:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.183709 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.183864 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.183893 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.183965 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.183997 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:17Z","lastTransitionTime":"2025-10-08T07:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.287770 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.287871 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.287891 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.287920 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.287948 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:17Z","lastTransitionTime":"2025-10-08T07:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.362672 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.362754 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:17 crc kubenswrapper[4693]: E1008 07:18:17.362955 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.362967 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:17 crc kubenswrapper[4693]: E1008 07:18:17.363113 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:17 crc kubenswrapper[4693]: E1008 07:18:17.363281 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.391575 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.391648 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.391668 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.391695 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.391713 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:17Z","lastTransitionTime":"2025-10-08T07:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.495418 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.495518 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.495546 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.495620 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.495648 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:17Z","lastTransitionTime":"2025-10-08T07:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.599960 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.600031 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.600052 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.600081 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.600138 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:17Z","lastTransitionTime":"2025-10-08T07:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.703204 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.703260 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.703277 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.703307 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.703326 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:17Z","lastTransitionTime":"2025-10-08T07:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.807067 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.807617 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.807944 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.808147 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.808316 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:17Z","lastTransitionTime":"2025-10-08T07:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.916629 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.916675 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.917315 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.917340 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:17 crc kubenswrapper[4693]: I1008 07:18:17.917357 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:17Z","lastTransitionTime":"2025-10-08T07:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.020944 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.021003 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.021014 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.021037 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.021051 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:18Z","lastTransitionTime":"2025-10-08T07:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.125794 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.126411 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.126510 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.126702 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.126880 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:18Z","lastTransitionTime":"2025-10-08T07:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.229883 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.230276 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.230353 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.230434 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.230505 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:18Z","lastTransitionTime":"2025-10-08T07:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.333697 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.334177 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.334249 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.334330 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.334391 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:18Z","lastTransitionTime":"2025-10-08T07:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.362531 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:18 crc kubenswrapper[4693]: E1008 07:18:18.362780 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.438223 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.438288 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.438308 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.438334 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.438350 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:18Z","lastTransitionTime":"2025-10-08T07:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.541267 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.541524 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.541549 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.541581 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.541602 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:18Z","lastTransitionTime":"2025-10-08T07:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.645139 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.645191 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.645201 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.645242 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.645252 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:18Z","lastTransitionTime":"2025-10-08T07:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.747611 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.747702 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.747724 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.747754 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.747773 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:18Z","lastTransitionTime":"2025-10-08T07:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.851553 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.851618 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.851653 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.851673 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.851706 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:18Z","lastTransitionTime":"2025-10-08T07:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.954807 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.954908 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.954928 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.954954 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:18 crc kubenswrapper[4693]: I1008 07:18:18.954971 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:18Z","lastTransitionTime":"2025-10-08T07:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.058015 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.058071 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.058089 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.058117 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.058136 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:19Z","lastTransitionTime":"2025-10-08T07:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.088912 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.089153 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.089120604 +0000 UTC m=+148.460085549 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.161848 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.162318 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.162466 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.162627 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.162767 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:19Z","lastTransitionTime":"2025-10-08T07:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.190312 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.190373 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.190448 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.190494 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.190592 4693 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.190652 4693 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.190690 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.190736 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.190764 4693 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.190864 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.190922 4693 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.190773 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.19071941 +0000 UTC m=+148.561684575 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.190951 4693 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.190982 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.190952356 +0000 UTC m=+148.561917321 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.191017 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.191004617 +0000 UTC m=+148.561969592 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.191041 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.191028838 +0000 UTC m=+148.561993803 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.266547 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.266612 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.266630 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.266660 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.266680 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:19Z","lastTransitionTime":"2025-10-08T07:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.363190 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.363345 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.363439 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.363191 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.363585 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:19 crc kubenswrapper[4693]: E1008 07:18:19.363708 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.369584 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.369649 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.369662 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.369688 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.369707 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:19Z","lastTransitionTime":"2025-10-08T07:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.474033 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.474101 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.474113 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.474137 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.474152 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:19Z","lastTransitionTime":"2025-10-08T07:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.577307 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.577349 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.577359 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.577375 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.577385 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:19Z","lastTransitionTime":"2025-10-08T07:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.681252 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.681330 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.681350 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.681381 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.681405 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:19Z","lastTransitionTime":"2025-10-08T07:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.785571 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.785625 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.785646 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.785675 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.785699 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:19Z","lastTransitionTime":"2025-10-08T07:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.888633 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.888716 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.888727 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.888748 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.888761 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:19Z","lastTransitionTime":"2025-10-08T07:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.994504 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.994583 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.994607 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.994642 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:19 crc kubenswrapper[4693]: I1008 07:18:19.994667 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:19Z","lastTransitionTime":"2025-10-08T07:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.098632 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.099102 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.099115 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.099139 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.099157 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:20Z","lastTransitionTime":"2025-10-08T07:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.202016 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.202117 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.202135 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.202174 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.202194 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:20Z","lastTransitionTime":"2025-10-08T07:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.306070 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.306132 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.306150 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.306179 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.306198 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:20Z","lastTransitionTime":"2025-10-08T07:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.362345 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:20 crc kubenswrapper[4693]: E1008 07:18:20.362540 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.408845 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.408914 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.408941 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.408970 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.408987 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:20Z","lastTransitionTime":"2025-10-08T07:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.513723 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.513857 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.513880 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.513917 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.513950 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:20Z","lastTransitionTime":"2025-10-08T07:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.616724 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.616789 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.616833 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.616871 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.616891 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:20Z","lastTransitionTime":"2025-10-08T07:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.719995 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.720045 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.720056 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.720077 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.720089 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:20Z","lastTransitionTime":"2025-10-08T07:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.823909 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.823978 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.823996 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.824026 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.824047 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:20Z","lastTransitionTime":"2025-10-08T07:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.926858 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.926912 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.926928 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.926949 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:20 crc kubenswrapper[4693]: I1008 07:18:20.926963 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:20Z","lastTransitionTime":"2025-10-08T07:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.029700 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.029748 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.029766 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.029791 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.029848 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:21Z","lastTransitionTime":"2025-10-08T07:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.132581 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.132619 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.132629 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.132647 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.132658 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:21Z","lastTransitionTime":"2025-10-08T07:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.235276 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.235322 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.235338 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.235361 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.235379 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:21Z","lastTransitionTime":"2025-10-08T07:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.337499 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.337568 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.337587 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.337621 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.337643 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:21Z","lastTransitionTime":"2025-10-08T07:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.362033 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.362070 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.362148 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:21 crc kubenswrapper[4693]: E1008 07:18:21.362224 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:21 crc kubenswrapper[4693]: E1008 07:18:21.362367 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:21 crc kubenswrapper[4693]: E1008 07:18:21.362461 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.440690 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.440763 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.440781 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.440810 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.440857 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:21Z","lastTransitionTime":"2025-10-08T07:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.543751 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.544379 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.544463 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.544575 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.544651 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:21Z","lastTransitionTime":"2025-10-08T07:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.648373 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.648442 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.648461 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.648490 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.648510 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:21Z","lastTransitionTime":"2025-10-08T07:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.751043 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.751090 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.751100 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.751118 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.751134 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:21Z","lastTransitionTime":"2025-10-08T07:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.854219 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.854525 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.854598 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.854688 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.854772 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:21Z","lastTransitionTime":"2025-10-08T07:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.957271 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.957322 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.957331 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.957352 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:21 crc kubenswrapper[4693]: I1008 07:18:21.957369 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:21Z","lastTransitionTime":"2025-10-08T07:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.060010 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.060083 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.060100 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.060127 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.060148 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.162882 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.162951 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.162968 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.162995 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.163015 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.266002 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.266050 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.266061 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.266078 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.266090 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.362695 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.362736 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:22 crc kubenswrapper[4693]: E1008 07:18:22.362933 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:22 crc kubenswrapper[4693]: E1008 07:18:22.363108 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.369061 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.369116 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.369134 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.369157 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.369174 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.472127 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.472191 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.472209 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.472237 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.472259 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.525338 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.525415 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.525441 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.525471 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.525491 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: E1008 07:18:22.541130 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.545428 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.545499 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.545520 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.545544 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.545565 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: E1008 07:18:22.559650 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[...],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:22Z is after 2025-08-24T17:21:41Z"
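Each of these status-patch retries is rejected for the same underlying reason: the node.network-node-identity.openshift.io webhook serving on 127.0.0.1:9743 presents a TLS certificate that expired on 2025-08-24T17:21:41Z, well before the current time in the log. Below is a minimal Go sketch of how one could confirm that from the host; it is illustrative tooling, not part of OpenShift, and it skips chain verification only so the handshake completes and the expired leaf can be read.

// certprobe.go: hedged diagnostic sketch, not OpenShift tooling. It dials the
// webhook endpoint from the log and prints the leaf certificate's validity
// window, the values the x509 "certificate has expired" error compares
// against the current time.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	addr := "127.0.0.1:9743" // node.network-node-identity.openshift.io webhook, per the log
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Printf("dial %s: %v\n", addr, err)
		return
	}
	defer conn.Close()
	leaf := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject: %s\nnotBefore: %s\nnotAfter: %s\n",
		leaf.Subject,
		leaf.NotBefore.UTC().Format(time.RFC3339),
		leaf.NotAfter.UTC().Format(time.RFC3339))
	if now := time.Now().UTC(); now.After(leaf.NotAfter) {
		// Mirrors the log: current time 2025-10-08T07:18:22Z is after 2025-08-24T17:21:41Z.
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), leaf.NotAfter.UTC().Format(time.RFC3339))
	}
}

Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.563796 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.563886 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 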
event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.563903 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.563929 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.563947 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: E1008 07:18:22.580371 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[...],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:22Z is after 2025-08-24T17:21:41Z"
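The condition={...} payloads printed by setters.go:603 throughout this stretch are ordinary node conditions serialized inline. A short Go sketch decoding one of them makes the fields explicit; the JSON is copied verbatim from a line above, and the struct is an ad-hoc stand-in for the real Kubernetes API type, used here only for log triage.

// condparse.go: small sketch (ad-hoc log triage, not OpenShift code) that
// decodes the condition={...} JSON from a "Node became not ready" line.
package main

import (
	"encoding/json"
	"fmt"
)

// nodeCondition mirrors the fields visible in the log payloads.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Payload copied verbatim from one of the setters.go:603 lines above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		fmt.Println("unmarshal:", err)
		return
	}
	fmt.Printf("%s=%s reason=%s transition=%s\n", c.Type, c.Status, c.Reason, c.LastTransitionTime)
}

Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.587273 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.587334 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 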
event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.587663 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.591154 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.591204 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: E1008 07:18:22.612405 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.617552 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.617683 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.617764 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.617866 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.617950 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: E1008 07:18:22.635305 4693 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1373c45-8137-4925-975b-395eb7ced486\\\",\\\"systemUUID\\\":\\\"553e2573-3cef-46f4-a622-139b36e277cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:22Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:22 crc kubenswrapper[4693]: E1008 07:18:22.635674 4693 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.637533 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.637632 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.637729 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.637855 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.637943 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.741785 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.741882 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.741901 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.741931 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.741950 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.844667 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.844778 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.844796 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.844845 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.844867 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.947988 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.948048 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.948066 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.948095 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:22 crc kubenswrapper[4693]: I1008 07:18:22.948114 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:22Z","lastTransitionTime":"2025-10-08T07:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.050960 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.051002 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.051017 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.051038 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.051051 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:23Z","lastTransitionTime":"2025-10-08T07:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.154228 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.154301 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.154322 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.154357 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.154384 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:23Z","lastTransitionTime":"2025-10-08T07:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.256627 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.256730 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.256748 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.256773 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.256791 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:23Z","lastTransitionTime":"2025-10-08T07:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.359767 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.360109 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.360195 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.360280 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.360383 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:23Z","lastTransitionTime":"2025-10-08T07:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.362083 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.362111 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:23 crc kubenswrapper[4693]: E1008 07:18:23.362259 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:23 crc kubenswrapper[4693]: E1008 07:18:23.362385 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.463292 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.463323 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.463333 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.463351 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.463363 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:23Z","lastTransitionTime":"2025-10-08T07:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.565977 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.566003 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.566012 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.566027 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.566039 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:23Z","lastTransitionTime":"2025-10-08T07:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.668359 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.668409 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.668424 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.668443 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.668455 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:23Z","lastTransitionTime":"2025-10-08T07:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.771206 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.771269 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.771292 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.771318 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.771338 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:23Z","lastTransitionTime":"2025-10-08T07:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.874368 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.874429 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.874446 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.874473 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.874491 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:23Z","lastTransitionTime":"2025-10-08T07:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.977036 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.977118 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.977135 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.977163 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:23 crc kubenswrapper[4693]: I1008 07:18:23.977181 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:23Z","lastTransitionTime":"2025-10-08T07:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.087424 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.087974 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.088187 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.088352 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.088482 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:24Z","lastTransitionTime":"2025-10-08T07:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.192011 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.192061 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.192077 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.192128 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.192146 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:24Z","lastTransitionTime":"2025-10-08T07:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.295372 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.295763 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.295787 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.295838 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.295859 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:24Z","lastTransitionTime":"2025-10-08T07:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.362798 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.362873 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:24 crc kubenswrapper[4693]: E1008 07:18:24.363068 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:24 crc kubenswrapper[4693]: E1008 07:18:24.363230 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.364429 4693 scope.go:117] "RemoveContainer" containerID="96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc" Oct 08 07:18:24 crc kubenswrapper[4693]: E1008 07:18:24.364927 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\"" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.398736 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.398978 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.399119 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.399258 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.399391 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:24Z","lastTransitionTime":"2025-10-08T07:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.502523 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.502580 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.502597 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.502626 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.502645 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:24Z","lastTransitionTime":"2025-10-08T07:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.606015 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.606083 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.606101 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.606198 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.606228 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:24Z","lastTransitionTime":"2025-10-08T07:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.709999 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.710058 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.710071 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.710090 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.710105 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:24Z","lastTransitionTime":"2025-10-08T07:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.813437 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.813754 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.813771 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.813792 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.813804 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:24Z","lastTransitionTime":"2025-10-08T07:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.917057 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.917114 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.917132 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.917158 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:24 crc kubenswrapper[4693]: I1008 07:18:24.917174 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:24Z","lastTransitionTime":"2025-10-08T07:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.020580 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.020960 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.021244 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.021439 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.021777 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:25Z","lastTransitionTime":"2025-10-08T07:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.125113 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.125454 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.125610 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.125769 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.125960 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:25Z","lastTransitionTime":"2025-10-08T07:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.228866 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.229147 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.229227 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.229301 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.229368 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:25Z","lastTransitionTime":"2025-10-08T07:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.333212 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.333505 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.333584 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.333659 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.333722 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:25Z","lastTransitionTime":"2025-10-08T07:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.362133 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.362260 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:25 crc kubenswrapper[4693]: E1008 07:18:25.362327 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:25 crc kubenswrapper[4693]: E1008 07:18:25.362455 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.380851 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de50860831599cc5e290094b923c37b955d9ed5a35e13a732916824a2ee671ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b983eebee349f9881ee89e8a8626872e9d47a5131b2cf1a47f080d73d563bd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.394849 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.407625 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.417889 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5hs96" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ac0c2cb-0a21-403d-82d2-a484eec44d7b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d2f635e9cedbac4d8f31ff88bdf9ef6a9cb8b2ec1e3f49d9c3a43acb64d6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s985f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5hs96\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.428246 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2tpvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb57d74-a70a-40bf-98e4-3e075a97b049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://828b7d0bbcf5495ba4807f531f0a53874cd36904c4cb0128bcb6f4d333a35cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sm9qp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2tpvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.436972 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.437018 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.437028 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.437046 4693 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.437058 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:25Z","lastTransitionTime":"2025-10-08T07:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.444476 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f68f540-8d3f-4081-8c7e-cd5023991ada\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nhc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-b2lbv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.462554 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eed8e76-b07b-471c-acab-ad1ff014e9aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57479e019eed6c6c413d4aa85c8b002196f91a538ed961806e08fda4043decd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce2e5de15841f31c2fba72f78c46f98e1f7a998be006a709ace265843c334119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b39089401487d19a8c474f7fb9a81bda6447793bf99a314ccc8b66b36cddd415\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://854d92a9a18bcff5cdf6264252bc72126aee01cf5450d5a3b8790a576a50fe15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.483465 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a07ce7d-f7ad-46d0-860b-59e9216228f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73fb50dda064d9c24f373f4aea41d9f594b1518f07b21397bd7c8ecf98fa11da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8584ff11fa8079f06144865c7c99a1eb8eb386391c561613dd542cf65edd8471\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc48abaa46b695a1d7956927e0f550f8723002e92e9b05e13de5f2a441fa7ac3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://455a36e5ab2c34efacde7c7b80f4e7cd86120c36bcb97d97c256412d261d674e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b61db898f18cd4dc31cd1673fd3dc1ab7c3a7dcc0b82009646c80c7efbce0d2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-08T07:17:14Z\\\",\\\"message\\\":\\\"tension-apiserver-authentication::requestheader-client-ca-file\\\\nI1008 07:17:14.637648 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759907818\\\\\\\\\\\\\\\" (2025-10-08 07:16:57 +0000 UTC to 2025-11-07 07:16:58 +0000 UTC (now=2025-10-08 07:17:14.637595847 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.637703 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1008 07:17:14.637727 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1008 07:17:14.637864 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-504443311/tls.crt::/tmp/serving-cert-504443311/tls.key\\\\\\\"\\\\nI1008 07:17:14.637974 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759907829\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759907829\\\\\\\\\\\\\\\" (2025-10-08 06:17:08 +0000 UTC to 2026-10-08 06:17:08 +0000 UTC (now=2025-10-08 07:17:14.637939217 +0000 UTC))\\\\\\\"\\\\nI1008 07:17:14.638004 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1008 07:17:14.638039 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1008 07:17:14.638067 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1008 07:17:14.640576 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640691 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1008 07:17:14.640912 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1008 07:17:14.667416 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7869495e123d687ba68454c184e190e2d393634509e8983b3fe14b6197ead9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://192eef2b08814511fccc6e73410b33f5a677a31cb4e9e2a86f643e5dab4b1d77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.497558 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.517070 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bfhs8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ddc214e-6569-4b0e-8783-f484a001ce6a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:18:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:18:03Z\\\",\\\"message\\\":\\\"2025-10-08T07:17:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to 
/host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54\\\\n2025-10-08T07:17:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6e407c51-a7c1-42ce-86ca-3d5c4bf76d54 to /host/opt/cni/bin/\\\\n2025-10-08T07:17:17Z [verbose] multus-daemon started\\\\n2025-10-08T07:17:17Z [verbose] Readiness Indicator file check\\\\n2025-10-08T07:18:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:18:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvtx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bfhs8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.533381 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac29deaa-d09f-4987-a04b-0b8f4188543c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e1786f78d12b347add638d55c544fc801321848b51bf78229917eca9bf4441\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd5c17f3c2d3acc27a1313d24b59f0fad8c0b57bdd789587fee6e4821bb8fe5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b5cb5fac06e68b9604fb24cff750293a0545225bfecf0e60f748a1dde8de32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae6974b2219e6941e8ed49c0b7fc0e4e34c006906b12c7ca916544897b413161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.539100 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.539132 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.539142 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.539158 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.539196 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:25Z","lastTransitionTime":"2025-10-08T07:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.551573 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca6f40749c37865827ae2cb69ed7a5a3c03341756503c0dc3bc21455591f5b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.563878 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adebc2b6-0bd4-4c1c-8b8f-68a98012f490\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c3ec342c5478c4ee142064d5ed5a7eff144cb029b1bd6d43519634664887cbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cx99l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xwrvr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.579733 4693 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"94f930b8-435a-415e-a2a8-cf8d2d04e134\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f3a2c49f8c7746e20a39b3e78e8664106e022f11c5e988ac314e9332e4f6501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9eee3983dfb28c7de4e9c0ca587a70b65fec1e7c049e1ebe09e6948917e96da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjzvs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-t49dn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.593788 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82120147-a6b6-4dfa-b5c6-5a51359e9e47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ef29025877e988edc36110ef9e9ec8fa1fb612a9392b9f7b9cfa6b34d9f1dc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf1c6feb01dded41617cbf1e40114c9efa9cebf177f926365b1e2c1fd9bb7803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf1c6feb01dded41617cbf1e40114c9efa9cebf177f926365b1e2c1fd9bb7803\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:16:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:16:55Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 
2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.608751 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c12698a5964175f3d32c47bf034f47b383c3793117fd0c69388e2a67cea6b670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.624827 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e017f3d-4e13-489e-9e9d-b5e6ec1d626a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6773a0911b37e7426179f4daf47411d07b37431c9952fd3322c1a5abfb8cd10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bc13c02af51af2506c52c7dcd8e2684e4f6e2ff01292f8d2ced3bdaf36a2198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c17e4817e81e56d59c0c523d722fc3de4dcf2ce98aefe5e19563ce7c2295baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6944d5a1536a6039780e9e02d8fd641f6cdbdedb6d4085f032c98da49808f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c44d99ff5f770a6124960dd7d0ece10ec7faa040476e33a6be8f87f290b8cc0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ac5aa0c134e2a0210da6ad4468adb683297e398b0756376557f5895ca42e0ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df79eff32fefb88d6e882efb26dcff465644c70bd0fad33a167c8984cb9b7b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qv25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f7fh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.642461 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.642523 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:25 crc 
kubenswrapper[4693]: I1008 07:18:25.642539 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.642559 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.642576 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:25Z","lastTransitionTime":"2025-10-08T07:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.647004 4693 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"379c61a3-51ff-4bdf-ab8b-5af8bf090716\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96af4ab28aff5f7a33dcb23fe168e895eae8dac2
11b7f428650b8dbd8134f0cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-08T07:18:10Z\\\",\\\"message\\\":\\\"e:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1008 07:18:10.346188 6652 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1008 07:18:10.346208 6652 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-08T07:18:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-08T07:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-08T07:17:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-08T07:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqs6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-08T07:17:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-snt7l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-08T07:18:25Z is after 2025-08-24T17:21:41Z" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.745768 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.745847 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.745860 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.745884 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:25 crc kubenswrapper[4693]: I1008 07:18:25.745900 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:25Z","lastTransitionTime":"2025-10-08T07:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} [... 5 near-identical node-status blocks omitted (07:18:25.850656 – 07:18:26.263538): kubelet_node_status.go:724 "Recording event message for node" events NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, each followed by setters.go:603 "Node became not ready" (KubeletNotReady: no CNI configuration file in /etc/kubernetes/cni/net.d/) ...] Oct 08 07:18:26 crc kubenswrapper[4693]: I1008 07:18:26.362045 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:26 crc kubenswrapper[4693]: E1008 07:18:26.362271 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:26 crc kubenswrapper[4693]: I1008 07:18:26.362384 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:26 crc kubenswrapper[4693]: E1008 07:18:26.362478 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
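Aside (annotation, not part of the captured log): every KubeletNotReady repetition above, and each failed sandbox creation, traces back to one missing artifact — no CNI configuration file under /etc/kubernetes/cni/net.d/, which the network provider (here OVN-Kubernetes) would normally write once its ovnkube-controller stops crash-looping. As a rough illustration of the readiness test involved, a minimal Go sketch that assumes only the directory path quoted in the log; the kubelet's actual check lives in its libcni/ocicni plumbing:

// cnicheck.go -- illustrative sketch only; not kubelet code.
// Scans the CNI conf dir named in the log for a usable network config,
// which is what NetworkReady=false is reporting the absence of.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	const confDir = "/etc/kubernetes/cni/net.d" // path taken verbatim from the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", confDir, err)
		return
	}
	var found []string
	for _, e := range entries {
		// Extensions commonly treated as candidate CNI configs (assumption).
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file found; network plugin not ready")
		return
	}
	fmt.Println("CNI configs:", found)
}

Once the provider writes its config into that directory, NetworkReady would be expected to flip back to true and the sandbox creations above to retry successfully.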
[... 10 near-identical node-status blocks omitted (07:18:26.366515 – 07:18:27.296238): same NodeHasSufficientMemory / NodeHasNoDiskPressure / NodeHasSufficientPID / NodeNotReady events and "Node became not ready" (KubeletNotReady) entries ...] Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.362985 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.362985 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:27 crc kubenswrapper[4693]: E1008 07:18:27.363204 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:27 crc kubenswrapper[4693]: E1008 07:18:27.363512 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
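Aside (annotation, not part of the captured log): the "Failed to update status for pod" entries earlier in this section all fail at the same point — the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves a certificate whose NotAfter (2025-08-24T17:21:41Z) is well behind the node clock (2025-10-08T07:18:25Z). A rough Go sketch of confirming that from the node, assuming nothing beyond the address quoted in the log:

// certcheck.go -- illustrative sketch only.
// Dials the webhook port and prints the validity window of the served
// certificate; InsecureSkipVerify lets the handshake complete even though
// verification would fail, exactly as the kubelet reports.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	addr := "127.0.0.1:9743" // webhook endpoint from the log
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%q notBefore=%s notAfter=%s expiredNow=%t\n",
			cert.Subject.String(), cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339), time.Now().After(cert.NotAfter))
	}
}

time.Now().After(cert.NotAfter) returning true is the same verdict the kubelet keeps logging as "x509: certificate has expired or is not yet valid".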
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:27 crc kubenswrapper[4693]: E1008 07:18:27.363512 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.399170 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.399213 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.399224 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.399240 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.399252 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:27Z","lastTransitionTime":"2025-10-08T07:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.502647 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.502693 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.502704 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.502721 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.502736 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:27Z","lastTransitionTime":"2025-10-08T07:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.605637 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.605684 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.605698 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.605719 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.605732 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:27Z","lastTransitionTime":"2025-10-08T07:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.709241 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.709284 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.709296 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.709314 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.709329 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:27Z","lastTransitionTime":"2025-10-08T07:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.812704 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.812805 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.813109 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.813143 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.813155 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:27Z","lastTransitionTime":"2025-10-08T07:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.915511 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.915553 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.915564 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.915582 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:27 crc kubenswrapper[4693]: I1008 07:18:27.915594 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:27Z","lastTransitionTime":"2025-10-08T07:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.018657 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.018699 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.018710 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.018726 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.018737 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:28Z","lastTransitionTime":"2025-10-08T07:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.121129 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.121191 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.121212 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.121242 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.121261 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:28Z","lastTransitionTime":"2025-10-08T07:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.224355 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.224424 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.224440 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.224466 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.224487 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:28Z","lastTransitionTime":"2025-10-08T07:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.327181 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.327237 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.327247 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.327273 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.327286 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:28Z","lastTransitionTime":"2025-10-08T07:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.361922 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:28 crc kubenswrapper[4693]: E1008 07:18:28.362057 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.361928 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:28 crc kubenswrapper[4693]: E1008 07:18:28.362222 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.430854 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.430905 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.430921 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.430944 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.430962 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:28Z","lastTransitionTime":"2025-10-08T07:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.534051 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.534128 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.534156 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.534190 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.534222 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:28Z","lastTransitionTime":"2025-10-08T07:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.636586 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.636671 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.636701 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.636733 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.636757 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:28Z","lastTransitionTime":"2025-10-08T07:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.740775 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.740900 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.740948 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.740987 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.741022 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:28Z","lastTransitionTime":"2025-10-08T07:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.844661 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.844734 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.844753 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.844780 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.844800 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:28Z","lastTransitionTime":"2025-10-08T07:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.948448 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.948521 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.948541 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.948569 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:28 crc kubenswrapper[4693]: I1008 07:18:28.948590 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:28Z","lastTransitionTime":"2025-10-08T07:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.051307 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.051396 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.051419 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.051448 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.051466 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:29Z","lastTransitionTime":"2025-10-08T07:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.155500 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.155569 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.155586 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.155613 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.155634 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:29Z","lastTransitionTime":"2025-10-08T07:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.259170 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.259280 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.259304 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.259339 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.259365 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:29Z","lastTransitionTime":"2025-10-08T07:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.361931 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.361968 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.361980 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.362000 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.361992 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.362014 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:29Z","lastTransitionTime":"2025-10-08T07:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.362143 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:29 crc kubenswrapper[4693]: E1008 07:18:29.362235 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:29 crc kubenswrapper[4693]: E1008 07:18:29.362451 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.465641 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.465718 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.465740 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.465781 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.465805 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:29Z","lastTransitionTime":"2025-10-08T07:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.569606 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.569661 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.569672 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.569691 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.569705 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:29Z","lastTransitionTime":"2025-10-08T07:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.673441 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.673515 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.673537 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.673571 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.673600 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:29Z","lastTransitionTime":"2025-10-08T07:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.777356 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.777413 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.777425 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.777446 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.777460 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:29Z","lastTransitionTime":"2025-10-08T07:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.880662 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.880728 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.880745 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.880770 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.880789 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:29Z","lastTransitionTime":"2025-10-08T07:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.983879 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.983951 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.983971 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.984000 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:29 crc kubenswrapper[4693]: I1008 07:18:29.984020 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:29Z","lastTransitionTime":"2025-10-08T07:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.087535 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.087601 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.087613 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.087631 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.087644 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:30Z","lastTransitionTime":"2025-10-08T07:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.191078 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.191147 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.191160 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.191180 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.191193 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:30Z","lastTransitionTime":"2025-10-08T07:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.293924 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.294006 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.294015 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.294034 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.294049 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:30Z","lastTransitionTime":"2025-10-08T07:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.362847 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:30 crc kubenswrapper[4693]: E1008 07:18:30.362958 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.363030 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:30 crc kubenswrapper[4693]: E1008 07:18:30.363113 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.397922 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.397983 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.397993 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.398012 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.398025 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:30Z","lastTransitionTime":"2025-10-08T07:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.502696 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.502995 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.503034 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.503061 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.503082 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:30Z","lastTransitionTime":"2025-10-08T07:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.606579 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.606648 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.606666 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.606693 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.606712 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:30Z","lastTransitionTime":"2025-10-08T07:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.710494 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.710565 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.710586 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.710616 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.710637 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:30Z","lastTransitionTime":"2025-10-08T07:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.814542 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.814652 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.814748 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.815276 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.815320 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:30Z","lastTransitionTime":"2025-10-08T07:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.918728 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.918785 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.918795 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.918830 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:30 crc kubenswrapper[4693]: I1008 07:18:30.918842 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:30Z","lastTransitionTime":"2025-10-08T07:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.021985 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.022046 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.022056 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.022076 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.022089 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:31Z","lastTransitionTime":"2025-10-08T07:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.124602 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.124652 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.124662 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.124681 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.124693 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:31Z","lastTransitionTime":"2025-10-08T07:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.231440 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.231521 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.231537 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.231560 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.231582 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:31Z","lastTransitionTime":"2025-10-08T07:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.335171 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.335226 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.335245 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.335269 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.335288 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:31Z","lastTransitionTime":"2025-10-08T07:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.362199 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.362353 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:31 crc kubenswrapper[4693]: E1008 07:18:31.362537 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:31 crc kubenswrapper[4693]: E1008 07:18:31.362934 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.438350 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.438413 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.438425 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.438445 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.438457 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:31Z","lastTransitionTime":"2025-10-08T07:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.541275 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.541350 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.541368 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.541395 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.541413 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:31Z","lastTransitionTime":"2025-10-08T07:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.645065 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.645151 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.645212 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.645272 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.645299 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:31Z","lastTransitionTime":"2025-10-08T07:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.748752 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.748845 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.748863 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.748894 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.748913 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:31Z","lastTransitionTime":"2025-10-08T07:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.852543 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.852625 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.852650 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.852679 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.852767 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:31Z","lastTransitionTime":"2025-10-08T07:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.955480 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.955529 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.955538 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.955555 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:31 crc kubenswrapper[4693]: I1008 07:18:31.955564 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:31Z","lastTransitionTime":"2025-10-08T07:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.058487 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.058571 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.058589 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.058621 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.058644 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:32Z","lastTransitionTime":"2025-10-08T07:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.162001 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.162084 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.162107 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.162138 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.162161 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:32Z","lastTransitionTime":"2025-10-08T07:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.266223 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.266297 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.266321 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.266352 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.266373 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:32Z","lastTransitionTime":"2025-10-08T07:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.362346 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.362467 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:32 crc kubenswrapper[4693]: E1008 07:18:32.362629 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:32 crc kubenswrapper[4693]: E1008 07:18:32.363176 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.369084 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.369156 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.369175 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.369204 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.369221 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:32Z","lastTransitionTime":"2025-10-08T07:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.472098 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.472151 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.472163 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.472182 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.472195 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:32Z","lastTransitionTime":"2025-10-08T07:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.575076 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.575137 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.575153 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.575178 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.575198 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:32Z","lastTransitionTime":"2025-10-08T07:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.678316 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.678381 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.678396 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.678444 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.678459 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:32Z","lastTransitionTime":"2025-10-08T07:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.781720 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.781782 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.781795 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.781839 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.781854 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:32Z","lastTransitionTime":"2025-10-08T07:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.884884 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.884950 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.884964 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.884984 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.885000 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:32Z","lastTransitionTime":"2025-10-08T07:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.987477 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.987542 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.987554 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.987574 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:32 crc kubenswrapper[4693]: I1008 07:18:32.987590 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:32Z","lastTransitionTime":"2025-10-08T07:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.033448 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.033517 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.033534 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.033562 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.033580 4693 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-08T07:18:33Z","lastTransitionTime":"2025-10-08T07:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.099733 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l"] Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.100356 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.103125 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.103541 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.103772 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.104012 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.160928 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a426d2a2-df91-4139-8401-783fff5fbaef-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.161017 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a426d2a2-df91-4139-8401-783fff5fbaef-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.161561 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a426d2a2-df91-4139-8401-783fff5fbaef-service-ca\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.161764 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a426d2a2-df91-4139-8401-783fff5fbaef-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.161896 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a426d2a2-df91-4139-8401-783fff5fbaef-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.190719 4693 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-f7fh5" podStartSLOduration=78.190695959 podStartE2EDuration="1m18.190695959s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:33.146302324 +0000 UTC m=+98.517267279" watchObservedRunningTime="2025-10-08 07:18:33.190695959 +0000 UTC m=+98.561660894" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.222075 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=21.222037495 podStartE2EDuration="21.222037495s" podCreationTimestamp="2025-10-08 07:18:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:33.221905081 +0000 UTC m=+98.592870076" watchObservedRunningTime="2025-10-08 07:18:33.222037495 +0000 UTC m=+98.593002430" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.263044 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a426d2a2-df91-4139-8401-783fff5fbaef-service-ca\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.263162 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a426d2a2-df91-4139-8401-783fff5fbaef-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.263200 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a426d2a2-df91-4139-8401-783fff5fbaef-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.263257 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a426d2a2-df91-4139-8401-783fff5fbaef-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.263315 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a426d2a2-df91-4139-8401-783fff5fbaef-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.263379 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a426d2a2-df91-4139-8401-783fff5fbaef-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: 
\"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.263515 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a426d2a2-df91-4139-8401-783fff5fbaef-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.264519 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a426d2a2-df91-4139-8401-783fff5fbaef-service-ca\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.280064 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a426d2a2-df91-4139-8401-783fff5fbaef-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.288350 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-5hs96" podStartSLOduration=78.288317505 podStartE2EDuration="1m18.288317505s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:33.287632476 +0000 UTC m=+98.658597451" watchObservedRunningTime="2025-10-08 07:18:33.288317505 +0000 UTC m=+98.659282450" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.293534 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a426d2a2-df91-4139-8401-783fff5fbaef-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-wcw8l\" (UID: \"a426d2a2-df91-4139-8401-783fff5fbaef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.303233 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-2tpvg" podStartSLOduration=78.303205776 podStartE2EDuration="1m18.303205776s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:33.302834926 +0000 UTC m=+98.673799861" watchObservedRunningTime="2025-10-08 07:18:33.303205776 +0000 UTC m=+98.674170731" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.334731 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=78.334707656 podStartE2EDuration="1m18.334707656s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:33.334610573 +0000 UTC m=+98.705575528" watchObservedRunningTime="2025-10-08 07:18:33.334707656 +0000 UTC m=+98.705672591" Oct 08 07:18:33 crc 
kubenswrapper[4693]: I1008 07:18:33.362259 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:33 crc kubenswrapper[4693]: E1008 07:18:33.362515 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.362756 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:33 crc kubenswrapper[4693]: E1008 07:18:33.363046 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.376515 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-bfhs8" podStartSLOduration=78.37649452 podStartE2EDuration="1m18.37649452s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:33.37578836 +0000 UTC m=+98.746753305" watchObservedRunningTime="2025-10-08 07:18:33.37649452 +0000 UTC m=+98.747459455" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.394059 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=78.394042384 podStartE2EDuration="1m18.394042384s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:33.393067417 +0000 UTC m=+98.764032352" watchObservedRunningTime="2025-10-08 07:18:33.394042384 +0000 UTC m=+98.765007319" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.415669 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.428638 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podStartSLOduration=78.428609228 podStartE2EDuration="1m18.428609228s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:33.427009124 +0000 UTC m=+98.797974099" watchObservedRunningTime="2025-10-08 07:18:33.428609228 +0000 UTC m=+98.799574173" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.446011 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-t49dn" podStartSLOduration=77.445986578 podStartE2EDuration="1m17.445986578s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:33.445141415 +0000 UTC m=+98.816106370" watchObservedRunningTime="2025-10-08 07:18:33.445986578 +0000 UTC m=+98.816951523" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.461984 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=50.461957909 podStartE2EDuration="50.461957909s" podCreationTimestamp="2025-10-08 07:17:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:33.461080135 +0000 UTC m=+98.832045120" watchObservedRunningTime="2025-10-08 07:18:33.461957909 +0000 UTC m=+98.832922844" Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.983582 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" event={"ID":"a426d2a2-df91-4139-8401-783fff5fbaef","Type":"ContainerStarted","Data":"63d67b8f58c75d0313a4307f23601672b7e87f3740bb8a9fcb90e15b430038c5"} Oct 08 07:18:33 crc kubenswrapper[4693]: I1008 07:18:33.984383 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" event={"ID":"a426d2a2-df91-4139-8401-783fff5fbaef","Type":"ContainerStarted","Data":"d69affd10e1b9fe715b71f796dde71a0c650e195435cd1aaf6ab526cbe0cf11a"} Oct 08 07:18:34 crc kubenswrapper[4693]: I1008 07:18:34.002172 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wcw8l" podStartSLOduration=79.002133064 podStartE2EDuration="1m19.002133064s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:34.000697924 +0000 UTC m=+99.371662929" watchObservedRunningTime="2025-10-08 07:18:34.002133064 +0000 UTC m=+99.373098029" Oct 08 07:18:34 crc kubenswrapper[4693]: I1008 07:18:34.362716 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:34 crc kubenswrapper[4693]: I1008 07:18:34.362862 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:34 crc kubenswrapper[4693]: E1008 07:18:34.363007 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:34 crc kubenswrapper[4693]: E1008 07:18:34.363183 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:34 crc kubenswrapper[4693]: I1008 07:18:34.482982 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:34 crc kubenswrapper[4693]: E1008 07:18:34.483328 4693 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:18:34 crc kubenswrapper[4693]: E1008 07:18:34.483470 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs podName:0f68f540-8d3f-4081-8c7e-cd5023991ada nodeName:}" failed. No retries permitted until 2025-10-08 07:19:38.483435472 +0000 UTC m=+163.854400447 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs") pod "network-metrics-daemon-b2lbv" (UID: "0f68f540-8d3f-4081-8c7e-cd5023991ada") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 08 07:18:35 crc kubenswrapper[4693]: I1008 07:18:35.363177 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:35 crc kubenswrapper[4693]: I1008 07:18:35.363341 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:35 crc kubenswrapper[4693]: E1008 07:18:35.364355 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:35 crc kubenswrapper[4693]: E1008 07:18:35.364587 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:36 crc kubenswrapper[4693]: I1008 07:18:36.362595 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:36 crc kubenswrapper[4693]: I1008 07:18:36.362717 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:36 crc kubenswrapper[4693]: E1008 07:18:36.362752 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:36 crc kubenswrapper[4693]: E1008 07:18:36.363018 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:37 crc kubenswrapper[4693]: I1008 07:18:37.362512 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:37 crc kubenswrapper[4693]: E1008 07:18:37.362669 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:37 crc kubenswrapper[4693]: I1008 07:18:37.362924 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:37 crc kubenswrapper[4693]: E1008 07:18:37.362983 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:38 crc kubenswrapper[4693]: I1008 07:18:38.361920 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:38 crc kubenswrapper[4693]: E1008 07:18:38.362108 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:38 crc kubenswrapper[4693]: I1008 07:18:38.361920 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:38 crc kubenswrapper[4693]: E1008 07:18:38.362449 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:39 crc kubenswrapper[4693]: I1008 07:18:39.363043 4693 scope.go:117] "RemoveContainer" containerID="96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc" Oct 08 07:18:39 crc kubenswrapper[4693]: I1008 07:18:39.363134 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:39 crc kubenswrapper[4693]: E1008 07:18:39.363237 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-snt7l_openshift-ovn-kubernetes(379c61a3-51ff-4bdf-ab8b-5af8bf090716)\"" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" Oct 08 07:18:39 crc kubenswrapper[4693]: I1008 07:18:39.363247 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:39 crc kubenswrapper[4693]: E1008 07:18:39.363339 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:39 crc kubenswrapper[4693]: E1008 07:18:39.363437 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:40 crc kubenswrapper[4693]: I1008 07:18:40.362408 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:40 crc kubenswrapper[4693]: I1008 07:18:40.362449 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:40 crc kubenswrapper[4693]: E1008 07:18:40.362589 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:40 crc kubenswrapper[4693]: E1008 07:18:40.363118 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:40 crc kubenswrapper[4693]: I1008 07:18:40.381679 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 08 07:18:41 crc kubenswrapper[4693]: I1008 07:18:41.362133 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:41 crc kubenswrapper[4693]: E1008 07:18:41.362284 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:41 crc kubenswrapper[4693]: I1008 07:18:41.362122 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:41 crc kubenswrapper[4693]: E1008 07:18:41.362769 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:42 crc kubenswrapper[4693]: I1008 07:18:42.362246 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:42 crc kubenswrapper[4693]: I1008 07:18:42.362269 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:42 crc kubenswrapper[4693]: E1008 07:18:42.362656 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:42 crc kubenswrapper[4693]: E1008 07:18:42.362887 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:43 crc kubenswrapper[4693]: I1008 07:18:43.362795 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:43 crc kubenswrapper[4693]: I1008 07:18:43.362924 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:43 crc kubenswrapper[4693]: E1008 07:18:43.363018 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:43 crc kubenswrapper[4693]: E1008 07:18:43.363096 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:44 crc kubenswrapper[4693]: I1008 07:18:44.362513 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:44 crc kubenswrapper[4693]: I1008 07:18:44.362755 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:44 crc kubenswrapper[4693]: E1008 07:18:44.363251 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:44 crc kubenswrapper[4693]: E1008 07:18:44.363148 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:45 crc kubenswrapper[4693]: I1008 07:18:45.362486 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:45 crc kubenswrapper[4693]: I1008 07:18:45.362556 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:45 crc kubenswrapper[4693]: E1008 07:18:45.364545 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:45 crc kubenswrapper[4693]: E1008 07:18:45.364682 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:45 crc kubenswrapper[4693]: I1008 07:18:45.397225 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=5.397199588 podStartE2EDuration="5.397199588s" podCreationTimestamp="2025-10-08 07:18:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:45.39580951 +0000 UTC m=+110.766774475" watchObservedRunningTime="2025-10-08 07:18:45.397199588 +0000 UTC m=+110.768164523" Oct 08 07:18:46 crc kubenswrapper[4693]: I1008 07:18:46.362255 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:46 crc kubenswrapper[4693]: E1008 07:18:46.362525 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:46 crc kubenswrapper[4693]: I1008 07:18:46.363117 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:46 crc kubenswrapper[4693]: E1008 07:18:46.363339 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:47 crc kubenswrapper[4693]: I1008 07:18:47.362925 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:47 crc kubenswrapper[4693]: I1008 07:18:47.363071 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:47 crc kubenswrapper[4693]: E1008 07:18:47.364113 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:47 crc kubenswrapper[4693]: E1008 07:18:47.364234 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:48 crc kubenswrapper[4693]: I1008 07:18:48.362671 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:48 crc kubenswrapper[4693]: I1008 07:18:48.362927 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:48 crc kubenswrapper[4693]: E1008 07:18:48.363227 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:48 crc kubenswrapper[4693]: E1008 07:18:48.364143 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:49 crc kubenswrapper[4693]: I1008 07:18:49.362887 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:49 crc kubenswrapper[4693]: E1008 07:18:49.363139 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:49 crc kubenswrapper[4693]: I1008 07:18:49.363541 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:49 crc kubenswrapper[4693]: E1008 07:18:49.363668 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:50 crc kubenswrapper[4693]: I1008 07:18:50.046217 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bfhs8_8ddc214e-6569-4b0e-8783-f484a001ce6a/kube-multus/1.log" Oct 08 07:18:50 crc kubenswrapper[4693]: I1008 07:18:50.047272 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bfhs8_8ddc214e-6569-4b0e-8783-f484a001ce6a/kube-multus/0.log" Oct 08 07:18:50 crc kubenswrapper[4693]: I1008 07:18:50.047392 4693 generic.go:334] "Generic (PLEG): container finished" podID="8ddc214e-6569-4b0e-8783-f484a001ce6a" containerID="07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a" exitCode=1 Oct 08 07:18:50 crc kubenswrapper[4693]: I1008 07:18:50.047470 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bfhs8" event={"ID":"8ddc214e-6569-4b0e-8783-f484a001ce6a","Type":"ContainerDied","Data":"07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a"} Oct 08 07:18:50 crc kubenswrapper[4693]: I1008 07:18:50.047569 4693 scope.go:117] "RemoveContainer" containerID="0476df199e5fa527f98e38d68a3d581e04dd828ff27750c3efe0dee4da18c431" Oct 08 07:18:50 crc kubenswrapper[4693]: I1008 07:18:50.048347 4693 scope.go:117] "RemoveContainer" containerID="07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a" Oct 08 07:18:50 crc kubenswrapper[4693]: E1008 07:18:50.048621 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-bfhs8_openshift-multus(8ddc214e-6569-4b0e-8783-f484a001ce6a)\"" pod="openshift-multus/multus-bfhs8" podUID="8ddc214e-6569-4b0e-8783-f484a001ce6a" Oct 08 07:18:50 crc kubenswrapper[4693]: I1008 07:18:50.362890 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:50 crc kubenswrapper[4693]: I1008 07:18:50.363097 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:50 crc kubenswrapper[4693]: E1008 07:18:50.364324 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:50 crc kubenswrapper[4693]: E1008 07:18:50.364663 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:50 crc kubenswrapper[4693]: I1008 07:18:50.366024 4693 scope.go:117] "RemoveContainer" containerID="96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc" Oct 08 07:18:51 crc kubenswrapper[4693]: I1008 07:18:51.053472 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bfhs8_8ddc214e-6569-4b0e-8783-f484a001ce6a/kube-multus/1.log" Oct 08 07:18:51 crc kubenswrapper[4693]: I1008 07:18:51.057297 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/3.log" Oct 08 07:18:51 crc kubenswrapper[4693]: I1008 07:18:51.060905 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerStarted","Data":"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd"} Oct 08 07:18:51 crc kubenswrapper[4693]: I1008 07:18:51.061581 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:18:51 crc kubenswrapper[4693]: I1008 07:18:51.116476 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podStartSLOduration=96.116441489 podStartE2EDuration="1m36.116441489s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:18:51.105438955 +0000 UTC m=+116.476403910" watchObservedRunningTime="2025-10-08 07:18:51.116441489 +0000 UTC m=+116.487406444" Oct 08 07:18:51 crc kubenswrapper[4693]: I1008 07:18:51.363032 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:51 crc kubenswrapper[4693]: E1008 07:18:51.363252 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:51 crc kubenswrapper[4693]: I1008 07:18:51.363722 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:51 crc kubenswrapper[4693]: E1008 07:18:51.363850 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:51 crc kubenswrapper[4693]: I1008 07:18:51.371089 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-b2lbv"] Oct 08 07:18:51 crc kubenswrapper[4693]: I1008 07:18:51.371297 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:51 crc kubenswrapper[4693]: E1008 07:18:51.371515 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:52 crc kubenswrapper[4693]: I1008 07:18:52.362931 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:52 crc kubenswrapper[4693]: E1008 07:18:52.363149 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:53 crc kubenswrapper[4693]: I1008 07:18:53.362044 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:53 crc kubenswrapper[4693]: I1008 07:18:53.362141 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:53 crc kubenswrapper[4693]: E1008 07:18:53.362218 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:53 crc kubenswrapper[4693]: I1008 07:18:53.362358 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:53 crc kubenswrapper[4693]: E1008 07:18:53.362385 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:53 crc kubenswrapper[4693]: E1008 07:18:53.362500 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:54 crc kubenswrapper[4693]: I1008 07:18:54.362584 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:54 crc kubenswrapper[4693]: E1008 07:18:54.362800 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:55 crc kubenswrapper[4693]: I1008 07:18:55.362034 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:55 crc kubenswrapper[4693]: I1008 07:18:55.362092 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:55 crc kubenswrapper[4693]: I1008 07:18:55.362092 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:55 crc kubenswrapper[4693]: E1008 07:18:55.363399 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:55 crc kubenswrapper[4693]: E1008 07:18:55.364086 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:55 crc kubenswrapper[4693]: E1008 07:18:55.364174 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:55 crc kubenswrapper[4693]: E1008 07:18:55.383752 4693 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 08 07:18:55 crc kubenswrapper[4693]: E1008 07:18:55.446343 4693 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 08 07:18:56 crc kubenswrapper[4693]: I1008 07:18:56.362168 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:56 crc kubenswrapper[4693]: E1008 07:18:56.362687 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:57 crc kubenswrapper[4693]: I1008 07:18:57.362714 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:57 crc kubenswrapper[4693]: I1008 07:18:57.362761 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:57 crc kubenswrapper[4693]: I1008 07:18:57.362724 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:57 crc kubenswrapper[4693]: E1008 07:18:57.362948 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:57 crc kubenswrapper[4693]: E1008 07:18:57.363116 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:57 crc kubenswrapper[4693]: E1008 07:18:57.363323 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:18:57 crc kubenswrapper[4693]: I1008 07:18:57.876917 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:18:58 crc kubenswrapper[4693]: I1008 07:18:58.361995 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:18:58 crc kubenswrapper[4693]: E1008 07:18:58.362172 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:18:59 crc kubenswrapper[4693]: I1008 07:18:59.362947 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:18:59 crc kubenswrapper[4693]: I1008 07:18:59.363006 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:18:59 crc kubenswrapper[4693]: E1008 07:18:59.363190 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:18:59 crc kubenswrapper[4693]: I1008 07:18:59.363257 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:18:59 crc kubenswrapper[4693]: E1008 07:18:59.363380 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:18:59 crc kubenswrapper[4693]: E1008 07:18:59.363488 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:19:00 crc kubenswrapper[4693]: I1008 07:19:00.362787 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:19:00 crc kubenswrapper[4693]: E1008 07:19:00.363605 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:19:00 crc kubenswrapper[4693]: E1008 07:19:00.448665 4693 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 08 07:19:01 crc kubenswrapper[4693]: I1008 07:19:01.362610 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:19:01 crc kubenswrapper[4693]: I1008 07:19:01.362647 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:19:01 crc kubenswrapper[4693]: I1008 07:19:01.362908 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:19:01 crc kubenswrapper[4693]: E1008 07:19:01.363016 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:19:01 crc kubenswrapper[4693]: E1008 07:19:01.363177 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:19:01 crc kubenswrapper[4693]: E1008 07:19:01.363302 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:19:02 crc kubenswrapper[4693]: I1008 07:19:02.362335 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:19:02 crc kubenswrapper[4693]: E1008 07:19:02.362658 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:19:03 crc kubenswrapper[4693]: I1008 07:19:03.362791 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:19:03 crc kubenswrapper[4693]: I1008 07:19:03.362899 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:19:03 crc kubenswrapper[4693]: I1008 07:19:03.363031 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:19:03 crc kubenswrapper[4693]: E1008 07:19:03.363039 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:19:03 crc kubenswrapper[4693]: E1008 07:19:03.363198 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:19:03 crc kubenswrapper[4693]: E1008 07:19:03.363328 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:19:04 crc kubenswrapper[4693]: I1008 07:19:04.362358 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:19:04 crc kubenswrapper[4693]: E1008 07:19:04.362571 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:19:05 crc kubenswrapper[4693]: I1008 07:19:05.362594 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:19:05 crc kubenswrapper[4693]: I1008 07:19:05.362713 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:19:05 crc kubenswrapper[4693]: I1008 07:19:05.363928 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:19:05 crc kubenswrapper[4693]: E1008 07:19:05.364250 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:19:05 crc kubenswrapper[4693]: I1008 07:19:05.364290 4693 scope.go:117] "RemoveContainer" containerID="07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a" Oct 08 07:19:05 crc kubenswrapper[4693]: E1008 07:19:05.364298 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:19:05 crc kubenswrapper[4693]: E1008 07:19:05.364350 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:19:05 crc kubenswrapper[4693]: E1008 07:19:05.454141 4693 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 08 07:19:06 crc kubenswrapper[4693]: I1008 07:19:06.123096 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bfhs8_8ddc214e-6569-4b0e-8783-f484a001ce6a/kube-multus/1.log" Oct 08 07:19:06 crc kubenswrapper[4693]: I1008 07:19:06.123193 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bfhs8" event={"ID":"8ddc214e-6569-4b0e-8783-f484a001ce6a","Type":"ContainerStarted","Data":"4265598bf83e8b88e476a3eae9245760e378f6d7351187cc4599c3af2f31f4c8"} Oct 08 07:19:06 crc kubenswrapper[4693]: I1008 07:19:06.362425 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:19:06 crc kubenswrapper[4693]: E1008 07:19:06.362588 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:19:07 crc kubenswrapper[4693]: I1008 07:19:07.362976 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:19:07 crc kubenswrapper[4693]: I1008 07:19:07.363058 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:19:07 crc kubenswrapper[4693]: I1008 07:19:07.363255 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:19:07 crc kubenswrapper[4693]: E1008 07:19:07.363238 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:19:07 crc kubenswrapper[4693]: E1008 07:19:07.363432 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:19:07 crc kubenswrapper[4693]: E1008 07:19:07.363586 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:19:08 crc kubenswrapper[4693]: I1008 07:19:08.362600 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:19:08 crc kubenswrapper[4693]: E1008 07:19:08.362802 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:19:09 crc kubenswrapper[4693]: I1008 07:19:09.362578 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:19:09 crc kubenswrapper[4693]: I1008 07:19:09.362716 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:19:09 crc kubenswrapper[4693]: E1008 07:19:09.362802 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 08 07:19:09 crc kubenswrapper[4693]: E1008 07:19:09.362948 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-b2lbv" podUID="0f68f540-8d3f-4081-8c7e-cd5023991ada" Oct 08 07:19:09 crc kubenswrapper[4693]: I1008 07:19:09.363478 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:19:09 crc kubenswrapper[4693]: E1008 07:19:09.363566 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 08 07:19:10 crc kubenswrapper[4693]: I1008 07:19:10.362305 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:19:10 crc kubenswrapper[4693]: E1008 07:19:10.362870 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 08 07:19:11 crc kubenswrapper[4693]: I1008 07:19:11.362681 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:19:11 crc kubenswrapper[4693]: I1008 07:19:11.362692 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:19:11 crc kubenswrapper[4693]: I1008 07:19:11.362718 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:19:11 crc kubenswrapper[4693]: I1008 07:19:11.366477 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 08 07:19:11 crc kubenswrapper[4693]: I1008 07:19:11.366748 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 08 07:19:11 crc kubenswrapper[4693]: I1008 07:19:11.366953 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 08 07:19:11 crc kubenswrapper[4693]: I1008 07:19:11.368059 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 08 07:19:11 crc kubenswrapper[4693]: I1008 07:19:11.368145 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 08 07:19:11 crc kubenswrapper[4693]: I1008 07:19:11.368793 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 08 07:19:12 crc kubenswrapper[4693]: I1008 07:19:12.361968 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.667036 4693 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.727583 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dh66b"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.728581 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.729690 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-sxz2w"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.740509 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.742753 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.743106 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.743330 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.743544 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.743751 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.744489 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.746647 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.748328 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.754570 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.754949 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.755711 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.756257 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.756516 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.756798 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.757083 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.757284 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.757314 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.757552 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 08 07:19:13 crc 
kubenswrapper[4693]: I1008 07:19:13.764197 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b303a16a-8059-4d90-91ac-2ba5c953f346-images\") pod \"machine-api-operator-5694c8668f-dh66b\" (UID: \"b303a16a-8059-4d90-91ac-2ba5c953f346\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.764353 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b303a16a-8059-4d90-91ac-2ba5c953f346-config\") pod \"machine-api-operator-5694c8668f-dh66b\" (UID: \"b303a16a-8059-4d90-91ac-2ba5c953f346\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.764401 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6p4xn\" (UniqueName: \"kubernetes.io/projected/b303a16a-8059-4d90-91ac-2ba5c953f346-kube-api-access-6p4xn\") pod \"machine-api-operator-5694c8668f-dh66b\" (UID: \"b303a16a-8059-4d90-91ac-2ba5c953f346\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.764535 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b303a16a-8059-4d90-91ac-2ba5c953f346-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dh66b\" (UID: \"b303a16a-8059-4d90-91ac-2ba5c953f346\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.765751 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.767622 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rjk9l"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.767920 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.770568 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-pwbtw"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.771032 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-pwbtw" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.771106 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.773702 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.774350 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.790584 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.790777 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.791044 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.791198 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.791230 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.791417 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.792705 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-ss8kz"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.793268 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-ss8kz" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.794072 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.794450 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.794737 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.794908 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.795019 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.795159 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.795173 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.795301 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.795362 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.795403 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 
07:19:13.795308 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.795551 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.796140 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.796619 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.796889 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.796924 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.797048 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.797809 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.798749 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.798945 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.799085 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.799666 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.799673 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.799738 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.801345 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-pcr5x"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.801870 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.802488 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.802551 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.805030 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.810248 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.810507 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t484z"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.811194 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.811481 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.815960 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-4tc6b"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.816444 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.816790 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.817096 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.827950 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.828938 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.830961 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.833260 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.833861 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.834160 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.834453 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.834848 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.835035 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.835143 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.835195 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.835316 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.835348 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.835386 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.835397 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.835471 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.835628 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-gxwm2"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.835664 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.835961 4693 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.836004 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.836211 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.836522 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.836896 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.837179 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.837464 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.837501 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.837666 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.837761 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.837869 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.838185 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.837670 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.839071 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.839890 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.840518 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.840618 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.840888 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.842842 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.843197 4693 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.843453 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-gxwm2" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.843650 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.843686 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.843797 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.843892 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.862209 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.863093 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.865443 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-x7lvf"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.865900 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.866394 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.866541 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.866750 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.866852 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.866971 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.867090 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.867162 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.867328 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.867697 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.867883 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.868248 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.870704 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.871864 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f13d519d-a936-4e80-b8a3-f1946cb85ac3-config\") pod \"route-controller-manager-6576b87f9c-ndznc\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.871898 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhc78\" (UniqueName: \"kubernetes.io/projected/f13d519d-a936-4e80-b8a3-f1946cb85ac3-kube-api-access-xhc78\") pod \"route-controller-manager-6576b87f9c-ndznc\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.871926 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-config\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.871948 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db09f9b2-b06b-462e-a750-077bd093f03f-serving-cert\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.871970 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/49ba6508-c23e-4a06-aa9c-eb38fa4e8c08-trusted-ca\") pod \"console-operator-58897d9998-ss8kz\" (UID: \"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08\") " pod="openshift-console-operator/console-operator-58897d9998-ss8kz" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.871994 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872019 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/82ab8b8f-7656-4d0f-9829-d222cd26b9aa-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-f62ts\" (UID: \"82ab8b8f-7656-4d0f-9829-d222cd26b9aa\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872058 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vw92\" (UniqueName: \"kubernetes.io/projected/abbbb752-3f68-4da0-b2d5-a6962f283a32-kube-api-access-4vw92\") pod \"openshift-controller-manager-operator-756b6f6bc6-g6md4\" (UID: \"abbbb752-3f68-4da0-b2d5-a6962f283a32\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872082 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-client-ca\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872104 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdk5l\" (UniqueName: \"kubernetes.io/projected/82ab8b8f-7656-4d0f-9829-d222cd26b9aa-kube-api-access-xdk5l\") pod \"cluster-samples-operator-665b6dd947-f62ts\" (UID: \"82ab8b8f-7656-4d0f-9829-d222cd26b9aa\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872124 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/927ac9fe-c982-487c-8258-e137f2ba8cdb-serving-cert\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872147 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjgsf\" (UniqueName: \"kubernetes.io/projected/cfd35b00-963c-445b-8a48-a72ecdce0875-kube-api-access-cjgsf\") pod \"openshift-config-operator-7777fb866f-mtqsq\" (UID: \"cfd35b00-963c-445b-8a48-a72ecdce0875\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872169 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s48gs\" (UniqueName: \"kubernetes.io/projected/e779da98-f489-4eac-9633-857e35f9d68a-kube-api-access-s48gs\") pod \"downloads-7954f5f757-pwbtw\" (UID: \"e779da98-f489-4eac-9633-857e35f9d68a\") " pod="openshift-console/downloads-7954f5f757-pwbtw" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 
07:19:13.872192 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abbbb752-3f68-4da0-b2d5-a6962f283a32-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-g6md4\" (UID: \"abbbb752-3f68-4da0-b2d5-a6962f283a32\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872216 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtxr2\" (UniqueName: \"kubernetes.io/projected/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-kube-api-access-gtxr2\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872237 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db09f9b2-b06b-462e-a750-077bd093f03f-config\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872257 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c3089122-f24c-493c-ad56-eaa062b4937a-auth-proxy-config\") pod \"machine-approver-56656f9798-29j9r\" (UID: \"c3089122-f24c-493c-ad56-eaa062b4937a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872282 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5a760e51-6b0f-431c-ba90-99416b3f215a-node-pullsecrets\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872304 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872325 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/db09f9b2-b06b-462e-a750-077bd093f03f-etcd-client\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872351 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49ba6508-c23e-4a06-aa9c-eb38fa4e8c08-serving-cert\") pod \"console-operator-58897d9998-ss8kz\" (UID: \"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08\") " pod="openshift-console-operator/console-operator-58897d9998-ss8kz" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872372 4693 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9hhz\" (UniqueName: \"kubernetes.io/projected/49ba6508-c23e-4a06-aa9c-eb38fa4e8c08-kube-api-access-z9hhz\") pod \"console-operator-58897d9998-ss8kz\" (UID: \"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08\") " pod="openshift-console-operator/console-operator-58897d9998-ss8kz" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872394 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/325ef07c-02ef-45c3-9fc4-ecfe80adbbfe-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cnxrg\" (UID: \"325ef07c-02ef-45c3-9fc4-ecfe80adbbfe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872419 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b303a16a-8059-4d90-91ac-2ba5c953f346-config\") pod \"machine-api-operator-5694c8668f-dh66b\" (UID: \"b303a16a-8059-4d90-91ac-2ba5c953f346\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872444 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5a760e51-6b0f-431c-ba90-99416b3f215a-etcd-client\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872467 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872489 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/db09f9b2-b06b-462e-a750-077bd093f03f-etcd-service-ca\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872512 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49ba6508-c23e-4a06-aa9c-eb38fa4e8c08-config\") pod \"console-operator-58897d9998-ss8kz\" (UID: \"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08\") " pod="openshift-console-operator/console-operator-58897d9998-ss8kz" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872534 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872558 4693 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872579 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-service-ca-bundle\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872603 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-audit-policies\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872626 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3089122-f24c-493c-ad56-eaa062b4937a-config\") pod \"machine-approver-56656f9798-29j9r\" (UID: \"c3089122-f24c-493c-ad56-eaa062b4937a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872648 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-audit\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872670 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-config\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872697 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c3089122-f24c-493c-ad56-eaa062b4937a-machine-approver-tls\") pod \"machine-approver-56656f9798-29j9r\" (UID: \"c3089122-f24c-493c-ad56-eaa062b4937a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872722 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e2d82dd2-84a7-49e3-8704-31ff0e0dea1a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-lmzcq\" (UID: \"e2d82dd2-84a7-49e3-8704-31ff0e0dea1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872746 4693 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872768 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a760e51-6b0f-431c-ba90-99416b3f215a-serving-cert\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872790 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5a760e51-6b0f-431c-ba90-99416b3f215a-audit-dir\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872816 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb968\" (UniqueName: \"kubernetes.io/projected/e2d82dd2-84a7-49e3-8704-31ff0e0dea1a-kube-api-access-xb968\") pod \"openshift-apiserver-operator-796bbdcf4f-lmzcq\" (UID: \"e2d82dd2-84a7-49e3-8704-31ff0e0dea1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872858 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfd35b00-963c-445b-8a48-a72ecdce0875-serving-cert\") pod \"openshift-config-operator-7777fb866f-mtqsq\" (UID: \"cfd35b00-963c-445b-8a48-a72ecdce0875\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872881 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f13d519d-a936-4e80-b8a3-f1946cb85ac3-client-ca\") pod \"route-controller-manager-6576b87f9c-ndznc\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872906 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b303a16a-8059-4d90-91ac-2ba5c953f346-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dh66b\" (UID: \"b303a16a-8059-4d90-91ac-2ba5c953f346\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872928 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-serving-cert\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872952 4693 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.872976 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/325ef07c-02ef-45c3-9fc4-ecfe80adbbfe-config\") pod \"kube-controller-manager-operator-78b949d7b-cnxrg\" (UID: \"325ef07c-02ef-45c3-9fc4-ecfe80adbbfe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.873000 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b303a16a-8059-4d90-91ac-2ba5c953f346-images\") pod \"machine-api-operator-5694c8668f-dh66b\" (UID: \"b303a16a-8059-4d90-91ac-2ba5c953f346\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.873025 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cfd35b00-963c-445b-8a48-a72ecdce0875-available-featuregates\") pod \"openshift-config-operator-7777fb866f-mtqsq\" (UID: \"cfd35b00-963c-445b-8a48-a72ecdce0875\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.873043 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.874109 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m4zw6"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.873051 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wh2c\" (UniqueName: \"kubernetes.io/projected/c3089122-f24c-493c-ad56-eaa062b4937a-kube-api-access-2wh2c\") pod \"machine-approver-56656f9798-29j9r\" (UID: \"c3089122-f24c-493c-ad56-eaa062b4937a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.874889 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.875321 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.875437 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abbbb752-3f68-4da0-b2d5-a6962f283a32-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-g6md4\" (UID: \"abbbb752-3f68-4da0-b2d5-a6962f283a32\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.875333 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.875679 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b303a16a-8059-4d90-91ac-2ba5c953f346-config\") pod \"machine-api-operator-5694c8668f-dh66b\" (UID: \"b303a16a-8059-4d90-91ac-2ba5c953f346\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.875786 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/325ef07c-02ef-45c3-9fc4-ecfe80adbbfe-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cnxrg\" (UID: \"325ef07c-02ef-45c3-9fc4-ecfe80adbbfe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.875823 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dwfq\" (UniqueName: \"kubernetes.io/projected/927ac9fe-c982-487c-8258-e137f2ba8cdb-kube-api-access-9dwfq\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.875874 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6p4xn\" (UniqueName: \"kubernetes.io/projected/b303a16a-8059-4d90-91ac-2ba5c953f346-kube-api-access-6p4xn\") pod \"machine-api-operator-5694c8668f-dh66b\" (UID: \"b303a16a-8059-4d90-91ac-2ba5c953f346\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.875898 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-config\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.875922 4693 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-etcd-serving-ca\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.875969 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876114 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e9a6efb0-34a1-4419-a097-14877cb1371c-audit-dir\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876145 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876184 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876190 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b303a16a-8059-4d90-91ac-2ba5c953f346-images\") pod \"machine-api-operator-5694c8668f-dh66b\" (UID: \"b303a16a-8059-4d90-91ac-2ba5c953f346\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876209 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876235 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876247 4693 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager"/"openshift-global-ca" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876302 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvz9k\" (UniqueName: \"kubernetes.io/projected/e9a6efb0-34a1-4419-a097-14877cb1371c-kube-api-access-wvz9k\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876350 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-image-import-ca\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876397 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5a760e51-6b0f-431c-ba90-99416b3f215a-encryption-config\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876421 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrg8x\" (UniqueName: \"kubernetes.io/projected/db09f9b2-b06b-462e-a750-077bd093f03f-kube-api-access-wrg8x\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876454 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/db09f9b2-b06b-462e-a750-077bd093f03f-etcd-ca\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876535 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phdpk\" (UniqueName: \"kubernetes.io/projected/5a760e51-6b0f-431c-ba90-99416b3f215a-kube-api-access-phdpk\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876564 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876587 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f13d519d-a936-4e80-b8a3-f1946cb85ac3-serving-cert\") pod \"route-controller-manager-6576b87f9c-ndznc\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" Oct 08 07:19:13 crc 
kubenswrapper[4693]: I1008 07:19:13.876592 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.876615 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2d82dd2-84a7-49e3-8704-31ff0e0dea1a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-lmzcq\" (UID: \"e2d82dd2-84a7-49e3-8704-31ff0e0dea1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.894538 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b303a16a-8059-4d90-91ac-2ba5c953f346-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dh66b\" (UID: \"b303a16a-8059-4d90-91ac-2ba5c953f346\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.899283 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.899967 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.900443 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.900644 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-blq4r"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.900903 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.902701 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.903053 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.903181 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.903435 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-blq4r"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.904252 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.904428 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.904671 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.908008 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-mt9pc"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.904934 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.905226 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.905312 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.910363 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.905422 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.905515 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.910645 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.910723 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.910738 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mt9pc"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.911889 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.913427 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.917098 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qkgxg"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.917786 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qkgxg"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.925837 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.931058 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.932011 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-9rsfp"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.932547 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.934021 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.938891 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.939664 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.942303 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.943117 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-97z2l"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.943355 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.944305 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.945783 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.956631 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.958250 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6"]
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.961807 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.959304 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.959374 4693 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.964290 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-pwbtw"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.973779 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-sxz2w"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.974585 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dh66b"] Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978322 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978357 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/82ab8b8f-7656-4d0f-9829-d222cd26b9aa-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-f62ts\" (UID: \"82ab8b8f-7656-4d0f-9829-d222cd26b9aa\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978386 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-config\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978405 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db09f9b2-b06b-462e-a750-077bd093f03f-serving-cert\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978428 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/49ba6508-c23e-4a06-aa9c-eb38fa4e8c08-trusted-ca\") pod \"console-operator-58897d9998-ss8kz\" (UID: \"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08\") " pod="openshift-console-operator/console-operator-58897d9998-ss8kz" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978461 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vw92\" (UniqueName: \"kubernetes.io/projected/abbbb752-3f68-4da0-b2d5-a6962f283a32-kube-api-access-4vw92\") pod \"openshift-controller-manager-operator-756b6f6bc6-g6md4\" (UID: \"abbbb752-3f68-4da0-b2d5-a6962f283a32\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978483 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-client-ca\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: 
\"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978499 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjgsf\" (UniqueName: \"kubernetes.io/projected/cfd35b00-963c-445b-8a48-a72ecdce0875-kube-api-access-cjgsf\") pod \"openshift-config-operator-7777fb866f-mtqsq\" (UID: \"cfd35b00-963c-445b-8a48-a72ecdce0875\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978517 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s48gs\" (UniqueName: \"kubernetes.io/projected/e779da98-f489-4eac-9633-857e35f9d68a-kube-api-access-s48gs\") pod \"downloads-7954f5f757-pwbtw\" (UID: \"e779da98-f489-4eac-9633-857e35f9d68a\") " pod="openshift-console/downloads-7954f5f757-pwbtw" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978554 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abbbb752-3f68-4da0-b2d5-a6962f283a32-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-g6md4\" (UID: \"abbbb752-3f68-4da0-b2d5-a6962f283a32\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978575 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtxr2\" (UniqueName: \"kubernetes.io/projected/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-kube-api-access-gtxr2\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978593 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdk5l\" (UniqueName: \"kubernetes.io/projected/82ab8b8f-7656-4d0f-9829-d222cd26b9aa-kube-api-access-xdk5l\") pod \"cluster-samples-operator-665b6dd947-f62ts\" (UID: \"82ab8b8f-7656-4d0f-9829-d222cd26b9aa\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978608 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/927ac9fe-c982-487c-8258-e137f2ba8cdb-serving-cert\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978626 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db09f9b2-b06b-462e-a750-077bd093f03f-config\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978646 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c3089122-f24c-493c-ad56-eaa062b4937a-auth-proxy-config\") pod \"machine-approver-56656f9798-29j9r\" (UID: \"c3089122-f24c-493c-ad56-eaa062b4937a\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978665 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5a760e51-6b0f-431c-ba90-99416b3f215a-node-pullsecrets\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978687 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978707 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/db09f9b2-b06b-462e-a750-077bd093f03f-etcd-client\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978726 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49ba6508-c23e-4a06-aa9c-eb38fa4e8c08-serving-cert\") pod \"console-operator-58897d9998-ss8kz\" (UID: \"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08\") " pod="openshift-console-operator/console-operator-58897d9998-ss8kz" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978747 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9hhz\" (UniqueName: \"kubernetes.io/projected/49ba6508-c23e-4a06-aa9c-eb38fa4e8c08-kube-api-access-z9hhz\") pod \"console-operator-58897d9998-ss8kz\" (UID: \"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08\") " pod="openshift-console-operator/console-operator-58897d9998-ss8kz" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978765 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/325ef07c-02ef-45c3-9fc4-ecfe80adbbfe-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cnxrg\" (UID: \"325ef07c-02ef-45c3-9fc4-ecfe80adbbfe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978782 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5a760e51-6b0f-431c-ba90-99416b3f215a-etcd-client\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978801 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978845 4693 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978958 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.978998 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-service-ca-bundle\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979015 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/db09f9b2-b06b-462e-a750-077bd093f03f-etcd-service-ca\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979032 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49ba6508-c23e-4a06-aa9c-eb38fa4e8c08-config\") pod \"console-operator-58897d9998-ss8kz\" (UID: \"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08\") " pod="openshift-console-operator/console-operator-58897d9998-ss8kz" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979050 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-audit-policies\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979067 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3089122-f24c-493c-ad56-eaa062b4937a-config\") pod \"machine-approver-56656f9798-29j9r\" (UID: \"c3089122-f24c-493c-ad56-eaa062b4937a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979143 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-audit\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979167 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-config\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: 
\"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979185 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c3089122-f24c-493c-ad56-eaa062b4937a-machine-approver-tls\") pod \"machine-approver-56656f9798-29j9r\" (UID: \"c3089122-f24c-493c-ad56-eaa062b4937a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979206 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e2d82dd2-84a7-49e3-8704-31ff0e0dea1a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-lmzcq\" (UID: \"e2d82dd2-84a7-49e3-8704-31ff0e0dea1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979229 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979247 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a760e51-6b0f-431c-ba90-99416b3f215a-serving-cert\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979269 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5a760e51-6b0f-431c-ba90-99416b3f215a-audit-dir\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979295 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfd35b00-963c-445b-8a48-a72ecdce0875-serving-cert\") pod \"openshift-config-operator-7777fb866f-mtqsq\" (UID: \"cfd35b00-963c-445b-8a48-a72ecdce0875\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979315 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f13d519d-a936-4e80-b8a3-f1946cb85ac3-client-ca\") pod \"route-controller-manager-6576b87f9c-ndznc\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979331 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb968\" (UniqueName: \"kubernetes.io/projected/e2d82dd2-84a7-49e3-8704-31ff0e0dea1a-kube-api-access-xb968\") pod \"openshift-apiserver-operator-796bbdcf4f-lmzcq\" (UID: \"e2d82dd2-84a7-49e3-8704-31ff0e0dea1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq" Oct 08 07:19:13 
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979351 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-serving-cert\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979389 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979409 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/325ef07c-02ef-45c3-9fc4-ecfe80adbbfe-config\") pod \"kube-controller-manager-operator-78b949d7b-cnxrg\" (UID: \"325ef07c-02ef-45c3-9fc4-ecfe80adbbfe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979426 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cfd35b00-963c-445b-8a48-a72ecdce0875-available-featuregates\") pod \"openshift-config-operator-7777fb866f-mtqsq\" (UID: \"cfd35b00-963c-445b-8a48-a72ecdce0875\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979443 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wh2c\" (UniqueName: \"kubernetes.io/projected/c3089122-f24c-493c-ad56-eaa062b4937a-kube-api-access-2wh2c\") pod \"machine-approver-56656f9798-29j9r\" (UID: \"c3089122-f24c-493c-ad56-eaa062b4937a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979477 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979498 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abbbb752-3f68-4da0-b2d5-a6962f283a32-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-g6md4\" (UID: \"abbbb752-3f68-4da0-b2d5-a6962f283a32\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979516 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/325ef07c-02ef-45c3-9fc4-ecfe80adbbfe-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cnxrg\" (UID: \"325ef07c-02ef-45c3-9fc4-ecfe80adbbfe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979543 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-config\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979561 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-etcd-serving-ca\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979583 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dwfq\" (UniqueName: \"kubernetes.io/projected/927ac9fe-c982-487c-8258-e137f2ba8cdb-kube-api-access-9dwfq\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979630 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979666 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e9a6efb0-34a1-4419-a097-14877cb1371c-audit-dir\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979729 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979757 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979800 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-image-import-ca\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.979845 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.980259 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.980286 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvz9k\" (UniqueName: \"kubernetes.io/projected/e9a6efb0-34a1-4419-a097-14877cb1371c-kube-api-access-wvz9k\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.980304 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5a760e51-6b0f-431c-ba90-99416b3f215a-encryption-config\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.980323 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrg8x\" (UniqueName: \"kubernetes.io/projected/db09f9b2-b06b-462e-a750-077bd093f03f-kube-api-access-wrg8x\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.980338 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/db09f9b2-b06b-462e-a750-077bd093f03f-etcd-ca\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.980371 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phdpk\" (UniqueName: \"kubernetes.io/projected/5a760e51-6b0f-431c-ba90-99416b3f215a-kube-api-access-phdpk\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.980403 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.980432 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f13d519d-a936-4e80-b8a3-f1946cb85ac3-serving-cert\") pod \"route-controller-manager-6576b87f9c-ndznc\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.980460 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2d82dd2-84a7-49e3-8704-31ff0e0dea1a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-lmzcq\" (UID: \"e2d82dd2-84a7-49e3-8704-31ff0e0dea1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.980479 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f13d519d-a936-4e80-b8a3-f1946cb85ac3-config\") pod \"route-controller-manager-6576b87f9c-ndznc\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.980522 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhc78\" (UniqueName: \"kubernetes.io/projected/f13d519d-a936-4e80-b8a3-f1946cb85ac3-kube-api-access-xhc78\") pod \"route-controller-manager-6576b87f9c-ndznc\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.980794 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/db09f9b2-b06b-462e-a750-077bd093f03f-etcd-service-ca\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.981988 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-config\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.982922 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5a760e51-6b0f-431c-ba90-99416b3f215a-audit-dir\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.983372 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e9a6efb0-34a1-4419-a097-14877cb1371c-audit-dir\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.984123 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/db09f9b2-b06b-462e-a750-077bd093f03f-etcd-ca\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.989057 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cfd35b00-963c-445b-8a48-a72ecdce0875-available-featuregates\") pod \"openshift-config-operator-7777fb866f-mtqsq\" (UID: \"cfd35b00-963c-445b-8a48-a72ecdce0875\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.989652 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db09f9b2-b06b-462e-a750-077bd093f03f-serving-cert\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.990728 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/325ef07c-02ef-45c3-9fc4-ecfe80adbbfe-config\") pod \"kube-controller-manager-operator-78b949d7b-cnxrg\" (UID: \"325ef07c-02ef-45c3-9fc4-ecfe80adbbfe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.992480 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/49ba6508-c23e-4a06-aa9c-eb38fa4e8c08-trusted-ca\") pod \"console-operator-58897d9998-ss8kz\" (UID: \"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08\") " pod="openshift-console-operator/console-operator-58897d9998-ss8kz"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.992651 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5a760e51-6b0f-431c-ba90-99416b3f215a-node-pullsecrets\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:13 crc kubenswrapper[4693]: I1008 07:19:13.994005 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.002303 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:13.990773 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f13d519d-a936-4e80-b8a3-f1946cb85ac3-config\") pod \"route-controller-manager-6576b87f9c-ndznc\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.002496 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.001635 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db09f9b2-b06b-462e-a750-077bd093f03f-config\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.002516 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c3089122-f24c-493c-ad56-eaa062b4937a-auth-proxy-config\") pod \"machine-approver-56656f9798-29j9r\" (UID: \"c3089122-f24c-493c-ad56-eaa062b4937a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.004059 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/325ef07c-02ef-45c3-9fc4-ecfe80adbbfe-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cnxrg\" (UID: \"325ef07c-02ef-45c3-9fc4-ecfe80adbbfe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.003131 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.004701 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.005539 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.005577 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-client-ca\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.006495 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.007599 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-image-import-ca\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.014392 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-service-ca-bundle\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.014666 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abbbb752-3f68-4da0-b2d5-a6962f283a32-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-g6md4\" (UID: \"abbbb752-3f68-4da0-b2d5-a6962f283a32\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.015950 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.016380 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfd35b00-963c-445b-8a48-a72ecdce0875-serving-cert\") pod \"openshift-config-operator-7777fb866f-mtqsq\" (UID: \"cfd35b00-963c-445b-8a48-a72ecdce0875\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.016846 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2d82dd2-84a7-49e3-8704-31ff0e0dea1a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-lmzcq\" (UID: \"e2d82dd2-84a7-49e3-8704-31ff0e0dea1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.016984 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-config\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.017075 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-etcd-serving-ca\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.017166 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f13d519d-a936-4e80-b8a3-f1946cb85ac3-client-ca\") pod \"route-controller-manager-6576b87f9c-ndznc\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.017303 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.017488 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5a760e51-6b0f-431c-ba90-99416b3f215a-etcd-client\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.017800 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.022312 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rjk9l"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.023287 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.024261 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-audit-policies\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.009317 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/927ac9fe-c982-487c-8258-e137f2ba8cdb-serving-cert\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.030148 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/5a760e51-6b0f-431c-ba90-99416b3f215a-audit\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.030155 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3089122-f24c-493c-ad56-eaa062b4937a-config\") pod \"machine-approver-56656f9798-29j9r\" (UID: \"c3089122-f24c-493c-ad56-eaa062b4937a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.033090 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/db09f9b2-b06b-462e-a750-077bd093f03f-etcd-client\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.033381 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abbbb752-3f68-4da0-b2d5-a6962f283a32-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-g6md4\" (UID: \"abbbb752-3f68-4da0-b2d5-a6962f283a32\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.033653 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.032804 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49ba6508-c23e-4a06-aa9c-eb38fa4e8c08-config\") pod \"console-operator-58897d9998-ss8kz\" (UID: \"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08\") " pod="openshift-console-operator/console-operator-58897d9998-ss8kz"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.033973 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.034123 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.043236 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.043618 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.048034 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5a760e51-6b0f-431c-ba90-99416b3f215a-encryption-config\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.048115 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.048214 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-config\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.048204 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e2d82dd2-84a7-49e3-8704-31ff0e0dea1a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-lmzcq\" (UID: \"e2d82dd2-84a7-49e3-8704-31ff0e0dea1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.031892 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.048575 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a760e51-6b0f-431c-ba90-99416b3f215a-serving-cert\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.049071 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/82ab8b8f-7656-4d0f-9829-d222cd26b9aa-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-f62ts\" (UID: \"82ab8b8f-7656-4d0f-9829-d222cd26b9aa\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.049461 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-serving-cert\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.049915 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-sgmv4"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.050038 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49ba6508-c23e-4a06-aa9c-eb38fa4e8c08-serving-cert\") pod \"console-operator-58897d9998-ss8kz\" (UID: \"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08\") " pod="openshift-console-operator/console-operator-58897d9998-ss8kz"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.050146 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c3089122-f24c-493c-ad56-eaa062b4937a-machine-approver-tls\") pod \"machine-approver-56656f9798-29j9r\" (UID: \"c3089122-f24c-493c-ad56-eaa062b4937a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.050748 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f13d519d-a936-4e80-b8a3-f1946cb85ac3-serving-cert\") pod \"route-controller-manager-6576b87f9c-ndznc\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.050854 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-ss8kz"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.051002 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-sgmv4"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.054848 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.056595 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.056747 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.059032 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.062029 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.064660 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-9rsfp"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.069070 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.070608 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.070952 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.073183 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.074680 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.078929 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.081330 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-4tc6b"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.083395 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-pcr5x"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.085218 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-mt9pc"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.086480 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.087627 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.088671 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-gxwm2"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.089968 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-227k8"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.090155 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.091145 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-787bm"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.091271 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-227k8"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.091699 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-787bm"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.092000 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.093205 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.094508 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.095675 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.097042 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-97z2l"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.098289 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qkgxg"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.100109 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-x7lvf"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.102578 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.103580 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.104617 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.105727 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t484z"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.106847 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.108037 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m4zw6"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.109108 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.110292 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-s4gbb"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.110301 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.111149 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-s4gbb"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.111779 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-787bm"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.113253 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.122730 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-sgmv4"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.123345 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-227k8"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.131857 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.150522 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.170694 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.190931 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.211159 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.231603 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.251389 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.272061 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.297952 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.311374 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.331142 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.351052 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.371174 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.390633 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.421548 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.431587 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.451231 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.471079 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.511262 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.517332 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6p4xn\" (UniqueName: \"kubernetes.io/projected/b303a16a-8059-4d90-91ac-2ba5c953f346-kube-api-access-6p4xn\") pod \"machine-api-operator-5694c8668f-dh66b\" (UID: \"b303a16a-8059-4d90-91ac-2ba5c953f346\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.531369 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.552159 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.572220 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.591935 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.612420 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.632025 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.653135 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.672607 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.687968 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.693339 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.710906 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.732268 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.756199 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.771620 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.793378 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.812853 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.831867 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.852321 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.872175 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.892205 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.911061 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.929167 4693 request.go:700] Waited for 1.018207184s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.932065 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.952144 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.970813 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.971324 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dh66b"]
Oct 08 07:19:14 crc kubenswrapper[4693]: I1008 07:19:14.993358 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.011606 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.034041 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.051179 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.072956 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.091680 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.112736 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.151721 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.171169 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.175152 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" event={"ID":"b303a16a-8059-4d90-91ac-2ba5c953f346","Type":"ContainerStarted","Data":"26d7b7d74893988346f356f2de9b91a138c7957ae7c2d54ded3b725c8763b32f"}
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.175318 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" event={"ID":"b303a16a-8059-4d90-91ac-2ba5c953f346","Type":"ContainerStarted","Data":"1187ffa880d57eb1956a24366508b7fae04fdb953e3e238d9db2969ab945c1c1"}
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.193258 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.210885 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.234123 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.252130 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.272235 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.292059 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.312648 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.332635 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.351609 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.372205 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.394579 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.412768 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.431397 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.452034 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.481909 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.491950 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.512430 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.552640 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.571790 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.619890 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vw92\" (UniqueName: \"kubernetes.io/projected/abbbb752-3f68-4da0-b2d5-a6962f283a32-kube-api-access-4vw92\") pod \"openshift-controller-manager-operator-756b6f6bc6-g6md4\" (UID: \"abbbb752-3f68-4da0-b2d5-a6962f283a32\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.634845 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhc78\" (UniqueName: \"kubernetes.io/projected/f13d519d-a936-4e80-b8a3-f1946cb85ac3-kube-api-access-xhc78\") pod \"route-controller-manager-6576b87f9c-ndznc\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.650141 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phdpk\" (UniqueName: \"kubernetes.io/projected/5a760e51-6b0f-431c-ba90-99416b3f215a-kube-api-access-phdpk\") pod \"apiserver-76f77b778f-t484z\" (UID: \"5a760e51-6b0f-431c-ba90-99416b3f215a\") " pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.671025 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvz9k\" (UniqueName: \"kubernetes.io/projected/e9a6efb0-34a1-4419-a097-14877cb1371c-kube-api-access-wvz9k\") pod \"oauth-openshift-558db77b4-rjk9l\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") " pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.675676 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.695931 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb968\" (UniqueName: \"kubernetes.io/projected/e2d82dd2-84a7-49e3-8704-31ff0e0dea1a-kube-api-access-xb968\") pod \"openshift-apiserver-operator-796bbdcf4f-lmzcq\" (UID: \"e2d82dd2-84a7-49e3-8704-31ff0e0dea1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.715041 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdk5l\" (UniqueName: \"kubernetes.io/projected/82ab8b8f-7656-4d0f-9829-d222cd26b9aa-kube-api-access-xdk5l\") pod \"cluster-samples-operator-665b6dd947-f62ts\" (UID: \"82ab8b8f-7656-4d0f-9829-d222cd26b9aa\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.731015 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/325ef07c-02ef-45c3-9fc4-ecfe80adbbfe-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cnxrg\" (UID: \"325ef07c-02ef-45c3-9fc4-ecfe80adbbfe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.744408 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.749284 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrg8x\" (UniqueName: \"kubernetes.io/projected/db09f9b2-b06b-462e-a750-077bd093f03f-kube-api-access-wrg8x\") pod \"etcd-operator-b45778765-4tc6b\" (UID: \"db09f9b2-b06b-462e-a750-077bd093f03f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.767656 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wh2c\" (UniqueName: \"kubernetes.io/projected/c3089122-f24c-493c-ad56-eaa062b4937a-kube-api-access-2wh2c\") pod \"machine-approver-56656f9798-29j9r\" (UID: \"c3089122-f24c-493c-ad56-eaa062b4937a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.806853 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9hhz\" (UniqueName: \"kubernetes.io/projected/49ba6508-c23e-4a06-aa9c-eb38fa4e8c08-kube-api-access-z9hhz\") pod \"console-operator-58897d9998-ss8kz\" (UID: \"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08\") " pod="openshift-console-operator/console-operator-58897d9998-ss8kz"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.819163 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.825719 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtxr2\" (UniqueName: \"kubernetes.io/projected/0e7238a2-53fc-41ac-84a2-7000d86b9ceb-kube-api-access-gtxr2\") pod \"authentication-operator-69f744f599-sxz2w\" (UID: \"0e7238a2-53fc-41ac-84a2-7000d86b9ceb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.826009 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s48gs\" (UniqueName: \"kubernetes.io/projected/e779da98-f489-4eac-9633-857e35f9d68a-kube-api-access-s48gs\") pod \"downloads-7954f5f757-pwbtw\" (UID: \"e779da98-f489-4eac-9633-857e35f9d68a\") " pod="openshift-console/downloads-7954f5f757-pwbtw"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.847314 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjgsf\" (UniqueName: \"kubernetes.io/projected/cfd35b00-963c-445b-8a48-a72ecdce0875-kube-api-access-cjgsf\") pod \"openshift-config-operator-7777fb866f-mtqsq\" (UID: \"cfd35b00-963c-445b-8a48-a72ecdce0875\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.854272 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.864413 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.873407 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dwfq\" (UniqueName: \"kubernetes.io/projected/927ac9fe-c982-487c-8258-e137f2ba8cdb-kube-api-access-9dwfq\") pod \"controller-manager-879f6c89f-pcr5x\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.874797 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.882165 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.887442 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.887449 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.899336 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.899623 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.906117 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.915951 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.930536 4693 request.go:700] Waited for 1.838876992s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/hostpath-provisioner/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.933833 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.934476 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rjk9l"]
Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.943605 4693 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.969282 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.984430 4693 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 08 07:19:15 crc kubenswrapper[4693]: I1008 07:19:15.993576 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 08 07:19:15 crc kubenswrapper[4693]: W1008 07:19:15.994290 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9a6efb0_34a1_4419_a097_14877cb1371c.slice/crio-001cd2a75b34fd678b07d97b8a06f4f5a108cfd1171b7de7300881d051708c09 WatchSource:0}: Error finding container 001cd2a75b34fd678b07d97b8a06f4f5a108cfd1171b7de7300881d051708c09: Status 404 returned error can't find the container with id 001cd2a75b34fd678b07d97b8a06f4f5a108cfd1171b7de7300881d051708c09 Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.013242 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.021213 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-pwbtw" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.027498 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.031080 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.052384 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 08 07:19:16 crc kubenswrapper[4693]: W1008 07:19:16.062028 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2d82dd2_84a7_49e3_8704_31ff0e0dea1a.slice/crio-dd26a4381ea9d343154e99e9aa566f5a3e5b2daddde3373602addad829dab768 WatchSource:0}: Error finding container dd26a4381ea9d343154e99e9aa566f5a3e5b2daddde3373602addad829dab768: Status 404 returned error can't find the container with id dd26a4381ea9d343154e99e9aa566f5a3e5b2daddde3373602addad829dab768 Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.070643 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-ss8kz" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.071330 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.092326 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.114935 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.158905 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.171386 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.210479 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.236269 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/926d3e66-e18c-4e73-830c-59e0d75071c7-serving-cert\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.236303 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6srr\" (UniqueName: \"kubernetes.io/projected/db2f35c9-5b39-4db5-9ec8-052cff6b8b8b-kube-api-access-t6srr\") pod \"packageserver-d55dfcdfc-n2md8\" (UID: \"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.236324 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e414e1c2-aeac-4bb5-9061-07c81c6a4630-proxy-tls\") pod \"machine-config-controller-84d6567774-tk68c\" (UID: \"e414e1c2-aeac-4bb5-9061-07c81c6a4630\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.236339 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b2104bc-e24a-4bcc-b312-53374a90564c-config\") pod \"kube-apiserver-operator-766d6c64bb-7p2nf\" (UID: \"4b2104bc-e24a-4bcc-b312-53374a90564c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.236396 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/30132782-7481-4ce8-957f-759ea66a4eaf-stats-auth\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.236541 4693 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4mbl\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-kube-api-access-v4mbl\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.236581 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/742a75c4-cb63-480a-961e-c6adaa5835f6-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k5p8c\" (UID: \"742a75c4-cb63-480a-961e-c6adaa5835f6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.236704 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/742a75c4-cb63-480a-961e-c6adaa5835f6-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k5p8c\" (UID: \"742a75c4-cb63-480a-961e-c6adaa5835f6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.236779 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c120941a-2403-4c12-8383-892fef07e53b-metrics-tls\") pod \"dns-operator-744455d44c-gxwm2\" (UID: \"c120941a-2403-4c12-8383-892fef07e53b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gxwm2" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.236879 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/db2f35c9-5b39-4db5-9ec8-052cff6b8b8b-apiservice-cert\") pod \"packageserver-d55dfcdfc-n2md8\" (UID: \"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.236944 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-bound-sa-token\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.236977 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c2d8737c-16dd-429e-a6e0-3d2c35877083-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-9t8ns\" (UID: \"c2d8737c-16dd-429e-a6e0-3d2c35877083\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.237036 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6gdq\" (UID: \"c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.237055 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5kwk\" (UniqueName: \"kubernetes.io/projected/2840730a-aa51-4168-84e9-9db9b8d136a1-kube-api-access-k5kwk\") pod \"multus-admission-controller-857f4d67dd-qkgxg\" (UID: \"2840730a-aa51-4168-84e9-9db9b8d136a1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qkgxg" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.237769 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c9e8b78a-82c5-422c-b105-d7169a771c3e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2dtg5\" (UID: \"c9e8b78a-82c5-422c-b105-d7169a771c3e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238045 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/926d3e66-e18c-4e73-830c-59e0d75071c7-encryption-config\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238150 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br8l5\" (UniqueName: \"kubernetes.io/projected/c120941a-2403-4c12-8383-892fef07e53b-kube-api-access-br8l5\") pod \"dns-operator-744455d44c-gxwm2\" (UID: \"c120941a-2403-4c12-8383-892fef07e53b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gxwm2" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238186 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-console-config\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238350 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/3629c487-2222-4044-a9cd-934694064ec5-signing-key\") pod \"service-ca-9c57cc56f-9rsfp\" (UID: \"3629c487-2222-4044-a9cd-934694064ec5\") " pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238375 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/926d3e66-e18c-4e73-830c-59e0d75071c7-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238433 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/db2f35c9-5b39-4db5-9ec8-052cff6b8b8b-tmpfs\") pod \"packageserver-d55dfcdfc-n2md8\" (UID: \"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 
07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238467 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e947463f-afe0-40a7-8f9f-b5d76d2086d0-trusted-ca\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238532 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c90da226-e34a-4d72-a64d-132a45439e4d-console-serving-cert\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238670 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f9aa9747-0f32-4924-a6f3-ffa16180c7a4-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gsb9l\" (UID: \"f9aa9747-0f32-4924-a6f3-ffa16180c7a4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238697 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fs7mc\" (UniqueName: \"kubernetes.io/projected/3629c487-2222-4044-a9cd-934694064ec5-kube-api-access-fs7mc\") pod \"service-ca-9c57cc56f-9rsfp\" (UID: \"3629c487-2222-4044-a9cd-934694064ec5\") " pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238763 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6gdq\" (UID: \"c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238781 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/362f8698-0eba-4e0b-b11d-ca82cc479688-bound-sa-token\") pod \"ingress-operator-5b745b69d9-m9s58\" (UID: \"362f8698-0eba-4e0b-b11d-ca82cc479688\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238862 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.238937 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5kgz\" (UniqueName: \"kubernetes.io/projected/02b76fdb-64e7-461a-906e-19cbda64cdc7-kube-api-access-d5kgz\") pod \"olm-operator-6b444d44fb-cdzt4\" (UID: \"02b76fdb-64e7-461a-906e-19cbda64cdc7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" Oct 08 
07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.239036 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2snw6\" (UniqueName: \"kubernetes.io/projected/926d3e66-e18c-4e73-830c-59e0d75071c7-kube-api-access-2snw6\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.239281 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/3629c487-2222-4044-a9cd-934694064ec5-signing-cabundle\") pod \"service-ca-9c57cc56f-9rsfp\" (UID: \"3629c487-2222-4044-a9cd-934694064ec5\") " pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.239316 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzwfz\" (UniqueName: \"kubernetes.io/projected/4bd3780a-bf97-4d18-a493-7ae54cf02750-kube-api-access-lzwfz\") pod \"migrator-59844c95c7-mt9pc\" (UID: \"4bd3780a-bf97-4d18-a493-7ae54cf02750\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mt9pc" Oct 08 07:19:16 crc kubenswrapper[4693]: E1008 07:19:16.239334 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:16.739319398 +0000 UTC m=+142.110284343 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.239352 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e947463f-afe0-40a7-8f9f-b5d76d2086d0-installation-pull-secrets\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.239386 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkdvk\" (UniqueName: \"kubernetes.io/projected/c2d8737c-16dd-429e-a6e0-3d2c35877083-kube-api-access-tkdvk\") pod \"control-plane-machine-set-operator-78cbb6b69f-9t8ns\" (UID: \"c2d8737c-16dd-429e-a6e0-3d2c35877083\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.239455 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/02b76fdb-64e7-461a-906e-19cbda64cdc7-srv-cert\") pod \"olm-operator-6b444d44fb-cdzt4\" (UID: \"02b76fdb-64e7-461a-906e-19cbda64cdc7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.239486 4693 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/362f8698-0eba-4e0b-b11d-ca82cc479688-trusted-ca\") pod \"ingress-operator-5b745b69d9-m9s58\" (UID: \"362f8698-0eba-4e0b-b11d-ca82cc479688\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.239571 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c9e8b78a-82c5-422c-b105-d7169a771c3e-images\") pod \"machine-config-operator-74547568cd-2dtg5\" (UID: \"c9e8b78a-82c5-422c-b105-d7169a771c3e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.239696 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e947463f-afe0-40a7-8f9f-b5d76d2086d0-registry-certificates\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.239738 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sbj4\" (UniqueName: \"kubernetes.io/projected/f9aa9747-0f32-4924-a6f3-ffa16180c7a4-kube-api-access-9sbj4\") pod \"cluster-image-registry-operator-dc59b4c8b-gsb9l\" (UID: \"f9aa9747-0f32-4924-a6f3-ffa16180c7a4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.239897 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gg5n\" (UniqueName: \"kubernetes.io/projected/c9e8b78a-82c5-422c-b105-d7169a771c3e-kube-api-access-8gg5n\") pod \"machine-config-operator-74547568cd-2dtg5\" (UID: \"c9e8b78a-82c5-422c-b105-d7169a771c3e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240197 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c9e8b78a-82c5-422c-b105-d7169a771c3e-proxy-tls\") pod \"machine-config-operator-74547568cd-2dtg5\" (UID: \"c9e8b78a-82c5-422c-b105-d7169a771c3e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240239 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-oauth-serving-cert\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240349 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f9aa9747-0f32-4924-a6f3-ffa16180c7a4-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gsb9l\" (UID: \"f9aa9747-0f32-4924-a6f3-ffa16180c7a4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 
07:19:16.240371 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e947463f-afe0-40a7-8f9f-b5d76d2086d0-ca-trust-extracted\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240400 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b2104bc-e24a-4bcc-b312-53374a90564c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7p2nf\" (UID: \"4b2104bc-e24a-4bcc-b312-53374a90564c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240417 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b2104bc-e24a-4bcc-b312-53374a90564c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7p2nf\" (UID: \"4b2104bc-e24a-4bcc-b312-53374a90564c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240433 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/db2f35c9-5b39-4db5-9ec8-052cff6b8b8b-webhook-cert\") pod \"packageserver-d55dfcdfc-n2md8\" (UID: \"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240452 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxfjb\" (UniqueName: \"kubernetes.io/projected/362f8698-0eba-4e0b-b11d-ca82cc479688-kube-api-access-jxfjb\") pod \"ingress-operator-5b745b69d9-m9s58\" (UID: \"362f8698-0eba-4e0b-b11d-ca82cc479688\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240504 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e414e1c2-aeac-4bb5-9061-07c81c6a4630-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-tk68c\" (UID: \"e414e1c2-aeac-4bb5-9061-07c81c6a4630\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240545 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clv4g\" (UniqueName: \"kubernetes.io/projected/c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd-kube-api-access-clv4g\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6gdq\" (UID: \"c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240564 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/926d3e66-e18c-4e73-830c-59e0d75071c7-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240593 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/926d3e66-e18c-4e73-830c-59e0d75071c7-audit-policies\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240609 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7768\" (UniqueName: \"kubernetes.io/projected/c90da226-e34a-4d72-a64d-132a45439e4d-kube-api-access-t7768\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240707 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/742a75c4-cb63-480a-961e-c6adaa5835f6-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k5p8c\" (UID: \"742a75c4-cb63-480a-961e-c6adaa5835f6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240739 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-registry-tls\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240786 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-service-ca\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240801 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/02b76fdb-64e7-461a-906e-19cbda64cdc7-profile-collector-cert\") pod \"olm-operator-6b444d44fb-cdzt4\" (UID: \"02b76fdb-64e7-461a-906e-19cbda64cdc7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240836 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/926d3e66-e18c-4e73-830c-59e0d75071c7-audit-dir\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240857 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/30132782-7481-4ce8-957f-759ea66a4eaf-default-certificate\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240881 4693 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/30132782-7481-4ce8-957f-759ea66a4eaf-service-ca-bundle\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240897 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30132782-7481-4ce8-957f-759ea66a4eaf-metrics-certs\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.240914 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c90da226-e34a-4d72-a64d-132a45439e4d-console-oauth-config\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.241026 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2840730a-aa51-4168-84e9-9db9b8d136a1-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qkgxg\" (UID: \"2840730a-aa51-4168-84e9-9db9b8d136a1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qkgxg" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.241079 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/926d3e66-e18c-4e73-830c-59e0d75071c7-etcd-client\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.241104 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcklt\" (UniqueName: \"kubernetes.io/projected/e414e1c2-aeac-4bb5-9061-07c81c6a4630-kube-api-access-xcklt\") pod \"machine-config-controller-84d6567774-tk68c\" (UID: \"e414e1c2-aeac-4bb5-9061-07c81c6a4630\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.241131 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f9aa9747-0f32-4924-a6f3-ffa16180c7a4-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gsb9l\" (UID: \"f9aa9747-0f32-4924-a6f3-ffa16180c7a4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.241159 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-trusted-ca-bundle\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.241164 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq" event={"ID":"e2d82dd2-84a7-49e3-8704-31ff0e0dea1a","Type":"ContainerStarted","Data":"dd26a4381ea9d343154e99e9aa566f5a3e5b2daddde3373602addad829dab768"} Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.241195 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/362f8698-0eba-4e0b-b11d-ca82cc479688-metrics-tls\") pod \"ingress-operator-5b745b69d9-m9s58\" (UID: \"362f8698-0eba-4e0b-b11d-ca82cc479688\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.241326 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzxbt\" (UniqueName: \"kubernetes.io/projected/30132782-7481-4ce8-957f-759ea66a4eaf-kube-api-access-bzxbt\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.244505 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" event={"ID":"c3089122-f24c-493c-ad56-eaa062b4937a","Type":"ContainerStarted","Data":"3659ad60c37dffff3aba28cd85988d7d11a9fe34903c9162ed57a0e2a9d94e08"} Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.246196 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" event={"ID":"e9a6efb0-34a1-4419-a097-14877cb1371c","Type":"ContainerStarted","Data":"001cd2a75b34fd678b07d97b8a06f4f5a108cfd1171b7de7300881d051708c09"} Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.274071 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" event={"ID":"b303a16a-8059-4d90-91ac-2ba5c953f346","Type":"ContainerStarted","Data":"4c47129f58a68a4d974873c8c60c8a0fb14f44d1d3f4e61401a722512a32b1ae"} Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.342735 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343512 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/3629c487-2222-4044-a9cd-934694064ec5-signing-key\") pod \"service-ca-9c57cc56f-9rsfp\" (UID: \"3629c487-2222-4044-a9cd-934694064ec5\") " pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343546 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/045e2499-3e6f-4ea0-8036-ba25d897c4da-secret-volume\") pod \"collect-profiles-29331795-kwbx4\" (UID: \"045e2499-3e6f-4ea0-8036-ba25d897c4da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343573 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/926d3e66-e18c-4e73-830c-59e0d75071c7-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343594 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/db2f35c9-5b39-4db5-9ec8-052cff6b8b8b-tmpfs\") pod \"packageserver-d55dfcdfc-n2md8\" (UID: \"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343613 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e947463f-afe0-40a7-8f9f-b5d76d2086d0-trusted-ca\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343635 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzg27\" (UniqueName: \"kubernetes.io/projected/cec7db18-2326-4d53-80e6-abce8210a82e-kube-api-access-nzg27\") pod \"service-ca-operator-777779d784-cs9f8\" (UID: \"cec7db18-2326-4d53-80e6-abce8210a82e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" Oct 08 07:19:16 crc kubenswrapper[4693]: E1008 07:19:16.343668 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:16.843638849 +0000 UTC m=+142.214603784 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343730 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq56z\" (UniqueName: \"kubernetes.io/projected/0e338d6f-e361-4a3d-85cb-6c1f100270f2-kube-api-access-mq56z\") pod \"package-server-manager-789f6589d5-kk6x6\" (UID: \"0e338d6f-e361-4a3d-85cb-6c1f100270f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343776 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c90da226-e34a-4d72-a64d-132a45439e4d-console-serving-cert\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343806 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f9aa9747-0f32-4924-a6f3-ffa16180c7a4-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gsb9l\" (UID: \"f9aa9747-0f32-4924-a6f3-ffa16180c7a4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343839 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fs7mc\" (UniqueName: \"kubernetes.io/projected/3629c487-2222-4044-a9cd-934694064ec5-kube-api-access-fs7mc\") pod \"service-ca-9c57cc56f-9rsfp\" (UID: \"3629c487-2222-4044-a9cd-934694064ec5\") " pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343864 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dafbb968-6b8e-4aea-94eb-aecfc938ecf0-metrics-tls\") pod \"dns-default-sgmv4\" (UID: \"dafbb968-6b8e-4aea-94eb-aecfc938ecf0\") " pod="openshift-dns/dns-default-sgmv4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343883 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6gdq\" (UID: \"c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343901 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/362f8698-0eba-4e0b-b11d-ca82cc479688-bound-sa-token\") pod \"ingress-operator-5b745b69d9-m9s58\" (UID: \"362f8698-0eba-4e0b-b11d-ca82cc479688\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343917 4693 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvdn6\" (UniqueName: \"kubernetes.io/projected/dafbb968-6b8e-4aea-94eb-aecfc938ecf0-kube-api-access-gvdn6\") pod \"dns-default-sgmv4\" (UID: \"dafbb968-6b8e-4aea-94eb-aecfc938ecf0\") " pod="openshift-dns/dns-default-sgmv4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343944 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.343969 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5kgz\" (UniqueName: \"kubernetes.io/projected/02b76fdb-64e7-461a-906e-19cbda64cdc7-kube-api-access-d5kgz\") pod \"olm-operator-6b444d44fb-cdzt4\" (UID: \"02b76fdb-64e7-461a-906e-19cbda64cdc7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344009 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2snw6\" (UniqueName: \"kubernetes.io/projected/926d3e66-e18c-4e73-830c-59e0d75071c7-kube-api-access-2snw6\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344043 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/3629c487-2222-4044-a9cd-934694064ec5-signing-cabundle\") pod \"service-ca-9c57cc56f-9rsfp\" (UID: \"3629c487-2222-4044-a9cd-934694064ec5\") " pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344061 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzwfz\" (UniqueName: \"kubernetes.io/projected/4bd3780a-bf97-4d18-a493-7ae54cf02750-kube-api-access-lzwfz\") pod \"migrator-59844c95c7-mt9pc\" (UID: \"4bd3780a-bf97-4d18-a493-7ae54cf02750\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mt9pc" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344082 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e947463f-afe0-40a7-8f9f-b5d76d2086d0-installation-pull-secrets\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344101 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkdvk\" (UniqueName: \"kubernetes.io/projected/c2d8737c-16dd-429e-a6e0-3d2c35877083-kube-api-access-tkdvk\") pod \"control-plane-machine-set-operator-78cbb6b69f-9t8ns\" (UID: \"c2d8737c-16dd-429e-a6e0-3d2c35877083\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344131 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/02b76fdb-64e7-461a-906e-19cbda64cdc7-srv-cert\") pod \"olm-operator-6b444d44fb-cdzt4\" (UID: \"02b76fdb-64e7-461a-906e-19cbda64cdc7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344146 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/362f8698-0eba-4e0b-b11d-ca82cc479688-trusted-ca\") pod \"ingress-operator-5b745b69d9-m9s58\" (UID: \"362f8698-0eba-4e0b-b11d-ca82cc479688\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344168 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c9e8b78a-82c5-422c-b105-d7169a771c3e-images\") pod \"machine-config-operator-74547568cd-2dtg5\" (UID: \"c9e8b78a-82c5-422c-b105-d7169a771c3e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344187 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e947463f-afe0-40a7-8f9f-b5d76d2086d0-registry-certificates\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344204 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sbj4\" (UniqueName: \"kubernetes.io/projected/f9aa9747-0f32-4924-a6f3-ffa16180c7a4-kube-api-access-9sbj4\") pod \"cluster-image-registry-operator-dc59b4c8b-gsb9l\" (UID: \"f9aa9747-0f32-4924-a6f3-ffa16180c7a4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344224 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7g45\" (UniqueName: \"kubernetes.io/projected/9073bf89-0d7f-42f0-b1c2-493584fd64ee-kube-api-access-z7g45\") pod \"catalog-operator-68c6474976-ppwh6\" (UID: \"9073bf89-0d7f-42f0-b1c2-493584fd64ee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344257 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-mountpoint-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344276 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzc7x\" (UniqueName: \"kubernetes.io/projected/045e2499-3e6f-4ea0-8036-ba25d897c4da-kube-api-access-pzc7x\") pod \"collect-profiles-29331795-kwbx4\" (UID: \"045e2499-3e6f-4ea0-8036-ba25d897c4da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344293 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/045e2499-3e6f-4ea0-8036-ba25d897c4da-config-volume\") pod 
\"collect-profiles-29331795-kwbx4\" (UID: \"045e2499-3e6f-4ea0-8036-ba25d897c4da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344309 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0e338d6f-e361-4a3d-85cb-6c1f100270f2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kk6x6\" (UID: \"0e338d6f-e361-4a3d-85cb-6c1f100270f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344331 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gg5n\" (UniqueName: \"kubernetes.io/projected/c9e8b78a-82c5-422c-b105-d7169a771c3e-kube-api-access-8gg5n\") pod \"machine-config-operator-74547568cd-2dtg5\" (UID: \"c9e8b78a-82c5-422c-b105-d7169a771c3e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344350 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cec7db18-2326-4d53-80e6-abce8210a82e-serving-cert\") pod \"service-ca-operator-777779d784-cs9f8\" (UID: \"cec7db18-2326-4d53-80e6-abce8210a82e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344367 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9073bf89-0d7f-42f0-b1c2-493584fd64ee-srv-cert\") pod \"catalog-operator-68c6474976-ppwh6\" (UID: \"9073bf89-0d7f-42f0-b1c2-493584fd64ee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344408 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c9e8b78a-82c5-422c-b105-d7169a771c3e-proxy-tls\") pod \"machine-config-operator-74547568cd-2dtg5\" (UID: \"c9e8b78a-82c5-422c-b105-d7169a771c3e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344426 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-oauth-serving-cert\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344443 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-registration-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344461 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1b656307-4735-491e-9c35-107eb4bc04a8-node-bootstrap-token\") pod \"machine-config-server-s4gbb\" (UID: 
\"1b656307-4735-491e-9c35-107eb4bc04a8\") " pod="openshift-machine-config-operator/machine-config-server-s4gbb" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344478 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4fa30d38-cd25-470f-9195-b2cc226f3b7b-cert\") pod \"ingress-canary-787bm\" (UID: \"4fa30d38-cd25-470f-9195-b2cc226f3b7b\") " pod="openshift-ingress-canary/ingress-canary-787bm" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344556 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f9aa9747-0f32-4924-a6f3-ffa16180c7a4-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gsb9l\" (UID: \"f9aa9747-0f32-4924-a6f3-ffa16180c7a4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344576 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-csi-data-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344628 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e947463f-afe0-40a7-8f9f-b5d76d2086d0-ca-trust-extracted\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344680 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b2104bc-e24a-4bcc-b312-53374a90564c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7p2nf\" (UID: \"4b2104bc-e24a-4bcc-b312-53374a90564c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344717 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b2104bc-e24a-4bcc-b312-53374a90564c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7p2nf\" (UID: \"4b2104bc-e24a-4bcc-b312-53374a90564c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344745 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/db2f35c9-5b39-4db5-9ec8-052cff6b8b8b-webhook-cert\") pod \"packageserver-d55dfcdfc-n2md8\" (UID: \"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344773 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pjgt\" (UniqueName: \"kubernetes.io/projected/4fa30d38-cd25-470f-9195-b2cc226f3b7b-kube-api-access-6pjgt\") pod \"ingress-canary-787bm\" (UID: \"4fa30d38-cd25-470f-9195-b2cc226f3b7b\") " pod="openshift-ingress-canary/ingress-canary-787bm" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344803 4693 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxfjb\" (UniqueName: \"kubernetes.io/projected/362f8698-0eba-4e0b-b11d-ca82cc479688-kube-api-access-jxfjb\") pod \"ingress-operator-5b745b69d9-m9s58\" (UID: \"362f8698-0eba-4e0b-b11d-ca82cc479688\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344851 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e414e1c2-aeac-4bb5-9061-07c81c6a4630-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-tk68c\" (UID: \"e414e1c2-aeac-4bb5-9061-07c81c6a4630\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344883 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clv4g\" (UniqueName: \"kubernetes.io/projected/c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd-kube-api-access-clv4g\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6gdq\" (UID: \"c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344909 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/926d3e66-e18c-4e73-830c-59e0d75071c7-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344938 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/926d3e66-e18c-4e73-830c-59e0d75071c7-audit-policies\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344963 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7768\" (UniqueName: \"kubernetes.io/projected/c90da226-e34a-4d72-a64d-132a45439e4d-kube-api-access-t7768\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.344994 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/742a75c4-cb63-480a-961e-c6adaa5835f6-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k5p8c\" (UID: \"742a75c4-cb63-480a-961e-c6adaa5835f6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345021 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xlch\" (UniqueName: \"kubernetes.io/projected/1b656307-4735-491e-9c35-107eb4bc04a8-kube-api-access-8xlch\") pod \"machine-config-server-s4gbb\" (UID: \"1b656307-4735-491e-9c35-107eb4bc04a8\") " pod="openshift-machine-config-operator/machine-config-server-s4gbb" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345051 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-registry-tls\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345077 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-service-ca\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345083 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e947463f-afe0-40a7-8f9f-b5d76d2086d0-ca-trust-extracted\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345101 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/02b76fdb-64e7-461a-906e-19cbda64cdc7-profile-collector-cert\") pod \"olm-operator-6b444d44fb-cdzt4\" (UID: \"02b76fdb-64e7-461a-906e-19cbda64cdc7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345130 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/926d3e66-e18c-4e73-830c-59e0d75071c7-audit-dir\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345157 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/30132782-7481-4ce8-957f-759ea66a4eaf-default-certificate\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345187 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/30132782-7481-4ce8-957f-759ea66a4eaf-service-ca-bundle\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345210 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30132782-7481-4ce8-957f-759ea66a4eaf-metrics-certs\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345234 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c90da226-e34a-4d72-a64d-132a45439e4d-console-oauth-config\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 
07:19:16.345260 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2840730a-aa51-4168-84e9-9db9b8d136a1-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qkgxg\" (UID: \"2840730a-aa51-4168-84e9-9db9b8d136a1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qkgxg" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345285 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8stf\" (UniqueName: \"kubernetes.io/projected/4f00a0d4-1d78-4ad4-808c-373fb017fe95-kube-api-access-l8stf\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345324 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/926d3e66-e18c-4e73-830c-59e0d75071c7-etcd-client\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345349 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcklt\" (UniqueName: \"kubernetes.io/projected/e414e1c2-aeac-4bb5-9061-07c81c6a4630-kube-api-access-xcklt\") pod \"machine-config-controller-84d6567774-tk68c\" (UID: \"e414e1c2-aeac-4bb5-9061-07c81c6a4630\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345386 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f9aa9747-0f32-4924-a6f3-ffa16180c7a4-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gsb9l\" (UID: \"f9aa9747-0f32-4924-a6f3-ffa16180c7a4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345410 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-trusted-ca-bundle\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345437 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/362f8698-0eba-4e0b-b11d-ca82cc479688-metrics-tls\") pod \"ingress-operator-5b745b69d9-m9s58\" (UID: \"362f8698-0eba-4e0b-b11d-ca82cc479688\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345462 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9073bf89-0d7f-42f0-b1c2-493584fd64ee-profile-collector-cert\") pod \"catalog-operator-68c6474976-ppwh6\" (UID: \"9073bf89-0d7f-42f0-b1c2-493584fd64ee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345489 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/a9d1eea1-3570-4937-a89b-5c6d87551b30-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-97z2l\" (UID: \"a9d1eea1-3570-4937-a89b-5c6d87551b30\") " pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345518 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzxbt\" (UniqueName: \"kubernetes.io/projected/30132782-7481-4ce8-957f-759ea66a4eaf-kube-api-access-bzxbt\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345541 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/926d3e66-e18c-4e73-830c-59e0d75071c7-serving-cert\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345564 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6srr\" (UniqueName: \"kubernetes.io/projected/db2f35c9-5b39-4db5-9ec8-052cff6b8b8b-kube-api-access-t6srr\") pod \"packageserver-d55dfcdfc-n2md8\" (UID: \"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345589 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e414e1c2-aeac-4bb5-9061-07c81c6a4630-proxy-tls\") pod \"machine-config-controller-84d6567774-tk68c\" (UID: \"e414e1c2-aeac-4bb5-9061-07c81c6a4630\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345612 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b2104bc-e24a-4bcc-b312-53374a90564c-config\") pod \"kube-apiserver-operator-766d6c64bb-7p2nf\" (UID: \"4b2104bc-e24a-4bcc-b312-53374a90564c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345645 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a9d1eea1-3570-4937-a89b-5c6d87551b30-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-97z2l\" (UID: \"a9d1eea1-3570-4937-a89b-5c6d87551b30\") " pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345669 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc5st\" (UniqueName: \"kubernetes.io/projected/a9d1eea1-3570-4937-a89b-5c6d87551b30-kube-api-access-rc5st\") pod \"marketplace-operator-79b997595-97z2l\" (UID: \"a9d1eea1-3570-4937-a89b-5c6d87551b30\") " pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345693 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/30132782-7481-4ce8-957f-759ea66a4eaf-stats-auth\") pod \"router-default-5444994796-blq4r\" (UID: 
\"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345714 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c9e8b78a-82c5-422c-b105-d7169a771c3e-images\") pod \"machine-config-operator-74547568cd-2dtg5\" (UID: \"c9e8b78a-82c5-422c-b105-d7169a771c3e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345720 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4mbl\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-kube-api-access-v4mbl\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345742 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/742a75c4-cb63-480a-961e-c6adaa5835f6-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k5p8c\" (UID: \"742a75c4-cb63-480a-961e-c6adaa5835f6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345765 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/742a75c4-cb63-480a-961e-c6adaa5835f6-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k5p8c\" (UID: \"742a75c4-cb63-480a-961e-c6adaa5835f6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345788 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c120941a-2403-4c12-8383-892fef07e53b-metrics-tls\") pod \"dns-operator-744455d44c-gxwm2\" (UID: \"c120941a-2403-4c12-8383-892fef07e53b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gxwm2" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345813 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/db2f35c9-5b39-4db5-9ec8-052cff6b8b8b-apiservice-cert\") pod \"packageserver-d55dfcdfc-n2md8\" (UID: \"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345865 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-bound-sa-token\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345893 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c2d8737c-16dd-429e-a6e0-3d2c35877083-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-9t8ns\" (UID: \"c2d8737c-16dd-429e-a6e0-3d2c35877083\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns" Oct 
08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345921 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6gdq\" (UID: \"c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345945 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5kwk\" (UniqueName: \"kubernetes.io/projected/2840730a-aa51-4168-84e9-9db9b8d136a1-kube-api-access-k5kwk\") pod \"multus-admission-controller-857f4d67dd-qkgxg\" (UID: \"2840730a-aa51-4168-84e9-9db9b8d136a1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qkgxg" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.345969 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dafbb968-6b8e-4aea-94eb-aecfc938ecf0-config-volume\") pod \"dns-default-sgmv4\" (UID: \"dafbb968-6b8e-4aea-94eb-aecfc938ecf0\") " pod="openshift-dns/dns-default-sgmv4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.346012 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cec7db18-2326-4d53-80e6-abce8210a82e-config\") pod \"service-ca-operator-777779d784-cs9f8\" (UID: \"cec7db18-2326-4d53-80e6-abce8210a82e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.346043 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c9e8b78a-82c5-422c-b105-d7169a771c3e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2dtg5\" (UID: \"c9e8b78a-82c5-422c-b105-d7169a771c3e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.346067 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-socket-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.346094 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/926d3e66-e18c-4e73-830c-59e0d75071c7-encryption-config\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.346132 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1b656307-4735-491e-9c35-107eb4bc04a8-certs\") pod \"machine-config-server-s4gbb\" (UID: \"1b656307-4735-491e-9c35-107eb4bc04a8\") " pod="openshift-machine-config-operator/machine-config-server-s4gbb" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.346159 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-br8l5\" (UniqueName: \"kubernetes.io/projected/c120941a-2403-4c12-8383-892fef07e53b-kube-api-access-br8l5\") pod \"dns-operator-744455d44c-gxwm2\" (UID: \"c120941a-2403-4c12-8383-892fef07e53b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gxwm2" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.346185 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-console-config\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.346210 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-plugins-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.346208 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/362f8698-0eba-4e0b-b11d-ca82cc479688-trusted-ca\") pod \"ingress-operator-5b745b69d9-m9s58\" (UID: \"362f8698-0eba-4e0b-b11d-ca82cc479688\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: E1008 07:19:16.346539 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:16.846529328 +0000 UTC m=+142.217494273 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.346565 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e947463f-afe0-40a7-8f9f-b5d76d2086d0-registry-certificates\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.347408 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/3629c487-2222-4044-a9cd-934694064ec5-signing-cabundle\") pod \"service-ca-9c57cc56f-9rsfp\" (UID: \"3629c487-2222-4044-a9cd-934694064ec5\") " pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.347511 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/db2f35c9-5b39-4db5-9ec8-052cff6b8b8b-tmpfs\") pod \"packageserver-d55dfcdfc-n2md8\" (UID: \"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.348505 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e947463f-afe0-40a7-8f9f-b5d76d2086d0-trusted-ca\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.348857 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6gdq\" (UID: \"c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.349353 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e414e1c2-aeac-4bb5-9061-07c81c6a4630-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-tk68c\" (UID: \"e414e1c2-aeac-4bb5-9061-07c81c6a4630\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.349747 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-oauth-serving-cert\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.350522 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/926d3e66-e18c-4e73-830c-59e0d75071c7-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.350754 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-trusted-ca-bundle\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.351388 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b2104bc-e24a-4bcc-b312-53374a90564c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7p2nf\" (UID: \"4b2104bc-e24a-4bcc-b312-53374a90564c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.353131 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c9e8b78a-82c5-422c-b105-d7169a771c3e-proxy-tls\") pod \"machine-config-operator-74547568cd-2dtg5\" (UID: \"c9e8b78a-82c5-422c-b105-d7169a771c3e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.353688 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/926d3e66-e18c-4e73-830c-59e0d75071c7-audit-dir\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.360734 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/30132782-7481-4ce8-957f-759ea66a4eaf-service-ca-bundle\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.361711 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/926d3e66-e18c-4e73-830c-59e0d75071c7-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.362931 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f9aa9747-0f32-4924-a6f3-ffa16180c7a4-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gsb9l\" (UID: \"f9aa9747-0f32-4924-a6f3-ffa16180c7a4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.364195 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-service-ca\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.366786 4693 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/db2f35c9-5b39-4db5-9ec8-052cff6b8b8b-apiservice-cert\") pod \"packageserver-d55dfcdfc-n2md8\" (UID: \"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.368912 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f9aa9747-0f32-4924-a6f3-ffa16180c7a4-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gsb9l\" (UID: \"f9aa9747-0f32-4924-a6f3-ffa16180c7a4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.369292 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c120941a-2403-4c12-8383-892fef07e53b-metrics-tls\") pod \"dns-operator-744455d44c-gxwm2\" (UID: \"c120941a-2403-4c12-8383-892fef07e53b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gxwm2" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.370073 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/02b76fdb-64e7-461a-906e-19cbda64cdc7-profile-collector-cert\") pod \"olm-operator-6b444d44fb-cdzt4\" (UID: \"02b76fdb-64e7-461a-906e-19cbda64cdc7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.370518 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/3629c487-2222-4044-a9cd-934694064ec5-signing-key\") pod \"service-ca-9c57cc56f-9rsfp\" (UID: \"3629c487-2222-4044-a9cd-934694064ec5\") " pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.372806 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/30132782-7481-4ce8-957f-759ea66a4eaf-stats-auth\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.373899 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c9e8b78a-82c5-422c-b105-d7169a771c3e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2dtg5\" (UID: \"c9e8b78a-82c5-422c-b105-d7169a771c3e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.374839 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/926d3e66-e18c-4e73-830c-59e0d75071c7-audit-policies\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.374870 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/926d3e66-e18c-4e73-830c-59e0d75071c7-etcd-client\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc 
kubenswrapper[4693]: I1008 07:19:16.375432 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/362f8698-0eba-4e0b-b11d-ca82cc479688-metrics-tls\") pod \"ingress-operator-5b745b69d9-m9s58\" (UID: \"362f8698-0eba-4e0b-b11d-ca82cc479688\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.375876 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/926d3e66-e18c-4e73-830c-59e0d75071c7-encryption-config\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.376070 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/db2f35c9-5b39-4db5-9ec8-052cff6b8b8b-webhook-cert\") pod \"packageserver-d55dfcdfc-n2md8\" (UID: \"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.386432 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c90da226-e34a-4d72-a64d-132a45439e4d-console-oauth-config\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.387215 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2840730a-aa51-4168-84e9-9db9b8d136a1-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qkgxg\" (UID: \"2840730a-aa51-4168-84e9-9db9b8d136a1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qkgxg" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.388400 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e947463f-afe0-40a7-8f9f-b5d76d2086d0-installation-pull-secrets\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.388693 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/30132782-7481-4ce8-957f-759ea66a4eaf-default-certificate\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.389416 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/742a75c4-cb63-480a-961e-c6adaa5835f6-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k5p8c\" (UID: \"742a75c4-cb63-480a-961e-c6adaa5835f6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.390633 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e414e1c2-aeac-4bb5-9061-07c81c6a4630-proxy-tls\") pod \"machine-config-controller-84d6567774-tk68c\" (UID: 
\"e414e1c2-aeac-4bb5-9061-07c81c6a4630\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.393362 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6gdq\" (UID: \"c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.395412 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/02b76fdb-64e7-461a-906e-19cbda64cdc7-srv-cert\") pod \"olm-operator-6b444d44fb-cdzt4\" (UID: \"02b76fdb-64e7-461a-906e-19cbda64cdc7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.397705 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-console-config\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.399971 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c90da226-e34a-4d72-a64d-132a45439e4d-console-serving-cert\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.400934 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c2d8737c-16dd-429e-a6e0-3d2c35877083-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-9t8ns\" (UID: \"c2d8737c-16dd-429e-a6e0-3d2c35877083\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.403877 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b2104bc-e24a-4bcc-b312-53374a90564c-config\") pod \"kube-apiserver-operator-766d6c64bb-7p2nf\" (UID: \"4b2104bc-e24a-4bcc-b312-53374a90564c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.404037 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/926d3e66-e18c-4e73-830c-59e0d75071c7-serving-cert\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.404377 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-registry-tls\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.407761 4693 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30132782-7481-4ce8-957f-759ea66a4eaf-metrics-certs\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.420632 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/742a75c4-cb63-480a-961e-c6adaa5835f6-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k5p8c\" (UID: \"742a75c4-cb63-480a-961e-c6adaa5835f6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.422339 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5kgz\" (UniqueName: \"kubernetes.io/projected/02b76fdb-64e7-461a-906e-19cbda64cdc7-kube-api-access-d5kgz\") pod \"olm-operator-6b444d44fb-cdzt4\" (UID: \"02b76fdb-64e7-461a-906e-19cbda64cdc7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.425846 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sbj4\" (UniqueName: \"kubernetes.io/projected/f9aa9747-0f32-4924-a6f3-ffa16180c7a4-kube-api-access-9sbj4\") pod \"cluster-image-registry-operator-dc59b4c8b-gsb9l\" (UID: \"f9aa9747-0f32-4924-a6f3-ffa16180c7a4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.426145 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.428443 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-ss8kz"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.431851 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-pwbtw"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.435694 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2snw6\" (UniqueName: \"kubernetes.io/projected/926d3e66-e18c-4e73-830c-59e0d75071c7-kube-api-access-2snw6\") pod \"apiserver-7bbb656c7d-xstkp\" (UID: \"926d3e66-e18c-4e73-830c-59e0d75071c7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.447152 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.447476 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gg5n\" (UniqueName: \"kubernetes.io/projected/c9e8b78a-82c5-422c-b105-d7169a771c3e-kube-api-access-8gg5n\") pod \"machine-config-operator-74547568cd-2dtg5\" (UID: \"c9e8b78a-82c5-422c-b105-d7169a771c3e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: E1008 07:19:16.447713 4693 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:16.947690222 +0000 UTC m=+142.318655157 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451238 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1b656307-4735-491e-9c35-107eb4bc04a8-node-bootstrap-token\") pod \"machine-config-server-s4gbb\" (UID: \"1b656307-4735-491e-9c35-107eb4bc04a8\") " pod="openshift-machine-config-operator/machine-config-server-s4gbb" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451289 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4fa30d38-cd25-470f-9195-b2cc226f3b7b-cert\") pod \"ingress-canary-787bm\" (UID: \"4fa30d38-cd25-470f-9195-b2cc226f3b7b\") " pod="openshift-ingress-canary/ingress-canary-787bm" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451311 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-csi-data-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451340 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pjgt\" (UniqueName: \"kubernetes.io/projected/4fa30d38-cd25-470f-9195-b2cc226f3b7b-kube-api-access-6pjgt\") pod \"ingress-canary-787bm\" (UID: \"4fa30d38-cd25-470f-9195-b2cc226f3b7b\") " pod="openshift-ingress-canary/ingress-canary-787bm" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451383 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xlch\" (UniqueName: \"kubernetes.io/projected/1b656307-4735-491e-9c35-107eb4bc04a8-kube-api-access-8xlch\") pod \"machine-config-server-s4gbb\" (UID: \"1b656307-4735-491e-9c35-107eb4bc04a8\") " pod="openshift-machine-config-operator/machine-config-server-s4gbb" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451428 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8stf\" (UniqueName: \"kubernetes.io/projected/4f00a0d4-1d78-4ad4-808c-373fb017fe95-kube-api-access-l8stf\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451479 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9073bf89-0d7f-42f0-b1c2-493584fd64ee-profile-collector-cert\") pod \"catalog-operator-68c6474976-ppwh6\" (UID: \"9073bf89-0d7f-42f0-b1c2-493584fd64ee\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451501 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a9d1eea1-3570-4937-a89b-5c6d87551b30-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-97z2l\" (UID: \"a9d1eea1-3570-4937-a89b-5c6d87551b30\") " pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451538 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a9d1eea1-3570-4937-a89b-5c6d87551b30-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-97z2l\" (UID: \"a9d1eea1-3570-4937-a89b-5c6d87551b30\") " pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451540 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-csi-data-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451556 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc5st\" (UniqueName: \"kubernetes.io/projected/a9d1eea1-3570-4937-a89b-5c6d87551b30-kube-api-access-rc5st\") pod \"marketplace-operator-79b997595-97z2l\" (UID: \"a9d1eea1-3570-4937-a89b-5c6d87551b30\") " pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451670 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dafbb968-6b8e-4aea-94eb-aecfc938ecf0-config-volume\") pod \"dns-default-sgmv4\" (UID: \"dafbb968-6b8e-4aea-94eb-aecfc938ecf0\") " pod="openshift-dns/dns-default-sgmv4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451699 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cec7db18-2326-4d53-80e6-abce8210a82e-config\") pod \"service-ca-operator-777779d784-cs9f8\" (UID: \"cec7db18-2326-4d53-80e6-abce8210a82e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451737 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-socket-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451770 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1b656307-4735-491e-9c35-107eb4bc04a8-certs\") pod \"machine-config-server-s4gbb\" (UID: \"1b656307-4735-491e-9c35-107eb4bc04a8\") " pod="openshift-machine-config-operator/machine-config-server-s4gbb" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451795 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-plugins-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451837 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/045e2499-3e6f-4ea0-8036-ba25d897c4da-secret-volume\") pod \"collect-profiles-29331795-kwbx4\" (UID: \"045e2499-3e6f-4ea0-8036-ba25d897c4da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451862 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzg27\" (UniqueName: \"kubernetes.io/projected/cec7db18-2326-4d53-80e6-abce8210a82e-kube-api-access-nzg27\") pod \"service-ca-operator-777779d784-cs9f8\" (UID: \"cec7db18-2326-4d53-80e6-abce8210a82e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451884 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq56z\" (UniqueName: \"kubernetes.io/projected/0e338d6f-e361-4a3d-85cb-6c1f100270f2-kube-api-access-mq56z\") pod \"package-server-manager-789f6589d5-kk6x6\" (UID: \"0e338d6f-e361-4a3d-85cb-6c1f100270f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.451947 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dafbb968-6b8e-4aea-94eb-aecfc938ecf0-metrics-tls\") pod \"dns-default-sgmv4\" (UID: \"dafbb968-6b8e-4aea-94eb-aecfc938ecf0\") " pod="openshift-dns/dns-default-sgmv4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.455113 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a9d1eea1-3570-4937-a89b-5c6d87551b30-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-97z2l\" (UID: \"a9d1eea1-3570-4937-a89b-5c6d87551b30\") " pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.455424 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-plugins-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.455668 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9073bf89-0d7f-42f0-b1c2-493584fd64ee-profile-collector-cert\") pod \"catalog-operator-68c6474976-ppwh6\" (UID: \"9073bf89-0d7f-42f0-b1c2-493584fd64ee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.455834 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvdn6\" (UniqueName: \"kubernetes.io/projected/dafbb968-6b8e-4aea-94eb-aecfc938ecf0-kube-api-access-gvdn6\") pod \"dns-default-sgmv4\" (UID: \"dafbb968-6b8e-4aea-94eb-aecfc938ecf0\") " pod="openshift-dns/dns-default-sgmv4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 
07:19:16.455887 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.455996 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7g45\" (UniqueName: \"kubernetes.io/projected/9073bf89-0d7f-42f0-b1c2-493584fd64ee-kube-api-access-z7g45\") pod \"catalog-operator-68c6474976-ppwh6\" (UID: \"9073bf89-0d7f-42f0-b1c2-493584fd64ee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.456040 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzc7x\" (UniqueName: \"kubernetes.io/projected/045e2499-3e6f-4ea0-8036-ba25d897c4da-kube-api-access-pzc7x\") pod \"collect-profiles-29331795-kwbx4\" (UID: \"045e2499-3e6f-4ea0-8036-ba25d897c4da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.456059 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-mountpoint-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.456083 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/045e2499-3e6f-4ea0-8036-ba25d897c4da-config-volume\") pod \"collect-profiles-29331795-kwbx4\" (UID: \"045e2499-3e6f-4ea0-8036-ba25d897c4da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.456090 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dafbb968-6b8e-4aea-94eb-aecfc938ecf0-config-volume\") pod \"dns-default-sgmv4\" (UID: \"dafbb968-6b8e-4aea-94eb-aecfc938ecf0\") " pod="openshift-dns/dns-default-sgmv4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.456103 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0e338d6f-e361-4a3d-85cb-6c1f100270f2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kk6x6\" (UID: \"0e338d6f-e361-4a3d-85cb-6c1f100270f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.456150 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cec7db18-2326-4d53-80e6-abce8210a82e-serving-cert\") pod \"service-ca-operator-777779d784-cs9f8\" (UID: \"cec7db18-2326-4d53-80e6-abce8210a82e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.456174 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/9073bf89-0d7f-42f0-b1c2-493584fd64ee-srv-cert\") pod \"catalog-operator-68c6474976-ppwh6\" (UID: \"9073bf89-0d7f-42f0-b1c2-493584fd64ee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.456216 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-registration-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.456322 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-registration-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.456664 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cec7db18-2326-4d53-80e6-abce8210a82e-config\") pod \"service-ca-operator-777779d784-cs9f8\" (UID: \"cec7db18-2326-4d53-80e6-abce8210a82e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.456974 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-socket-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: E1008 07:19:16.461386 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:16.961368759 +0000 UTC m=+142.332333794 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.461506 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cec7db18-2326-4d53-80e6-abce8210a82e-serving-cert\") pod \"service-ca-operator-777779d784-cs9f8\" (UID: \"cec7db18-2326-4d53-80e6-abce8210a82e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.462061 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1b656307-4735-491e-9c35-107eb4bc04a8-certs\") pod \"machine-config-server-s4gbb\" (UID: \"1b656307-4735-491e-9c35-107eb4bc04a8\") " pod="openshift-machine-config-operator/machine-config-server-s4gbb" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.462095 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4f00a0d4-1d78-4ad4-808c-373fb017fe95-mountpoint-dir\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.462446 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a9d1eea1-3570-4937-a89b-5c6d87551b30-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-97z2l\" (UID: \"a9d1eea1-3570-4937-a89b-5c6d87551b30\") " pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.462781 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/045e2499-3e6f-4ea0-8036-ba25d897c4da-config-volume\") pod \"collect-profiles-29331795-kwbx4\" (UID: \"045e2499-3e6f-4ea0-8036-ba25d897c4da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.463398 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0e338d6f-e361-4a3d-85cb-6c1f100270f2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kk6x6\" (UID: \"0e338d6f-e361-4a3d-85cb-6c1f100270f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.467271 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"] Oct 08 07:19:16 crc kubenswrapper[4693]: W1008 07:19:16.467643 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode779da98_f489_4eac_9633_857e35f9d68a.slice/crio-af79c4f34420da78d4f3f70acced24b105948d7db3eb882e8c56fd1fa5e8bbcf WatchSource:0}: Error finding container 
af79c4f34420da78d4f3f70acced24b105948d7db3eb882e8c56fd1fa5e8bbcf: Status 404 returned error can't find the container with id af79c4f34420da78d4f3f70acced24b105948d7db3eb882e8c56fd1fa5e8bbcf Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.468254 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-sxz2w"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.468292 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/045e2499-3e6f-4ea0-8036-ba25d897c4da-secret-volume\") pod \"collect-profiles-29331795-kwbx4\" (UID: \"045e2499-3e6f-4ea0-8036-ba25d897c4da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.469354 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.470420 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dafbb968-6b8e-4aea-94eb-aecfc938ecf0-metrics-tls\") pod \"dns-default-sgmv4\" (UID: \"dafbb968-6b8e-4aea-94eb-aecfc938ecf0\") " pod="openshift-dns/dns-default-sgmv4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.470608 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9073bf89-0d7f-42f0-b1c2-493584fd64ee-srv-cert\") pod \"catalog-operator-68c6474976-ppwh6\" (UID: \"9073bf89-0d7f-42f0-b1c2-493584fd64ee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.471293 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4fa30d38-cd25-470f-9195-b2cc226f3b7b-cert\") pod \"ingress-canary-787bm\" (UID: \"4fa30d38-cd25-470f-9195-b2cc226f3b7b\") " pod="openshift-ingress-canary/ingress-canary-787bm" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.471361 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/362f8698-0eba-4e0b-b11d-ca82cc479688-bound-sa-token\") pod \"ingress-operator-5b745b69d9-m9s58\" (UID: \"362f8698-0eba-4e0b-b11d-ca82cc479688\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.487196 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1b656307-4735-491e-9c35-107eb4bc04a8-node-bootstrap-token\") pod \"machine-config-server-s4gbb\" (UID: \"1b656307-4735-491e-9c35-107eb4bc04a8\") " pod="openshift-machine-config-operator/machine-config-server-s4gbb" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.490456 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clv4g\" (UniqueName: \"kubernetes.io/projected/c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd-kube-api-access-clv4g\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6gdq\" (UID: \"c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.513266 4693 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-jxfjb\" (UniqueName: \"kubernetes.io/projected/362f8698-0eba-4e0b-b11d-ca82cc479688-kube-api-access-jxfjb\") pod \"ingress-operator-5b745b69d9-m9s58\" (UID: \"362f8698-0eba-4e0b-b11d-ca82cc479688\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.524095 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-pcr5x"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.532661 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.537118 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-4tc6b"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.537174 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t484z"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.540718 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.542842 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzxbt\" (UniqueName: \"kubernetes.io/projected/30132782-7481-4ce8-957f-759ea66a4eaf-kube-api-access-bzxbt\") pod \"router-default-5444994796-blq4r\" (UID: \"30132782-7481-4ce8-957f-759ea66a4eaf\") " pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.548046 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b2104bc-e24a-4bcc-b312-53374a90564c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7p2nf\" (UID: \"4b2104bc-e24a-4bcc-b312-53374a90564c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" Oct 08 07:19:16 crc kubenswrapper[4693]: W1008 07:19:16.549989 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod927ac9fe_c982_487c_8258_e137f2ba8cdb.slice/crio-cd04abe9946779742eed56c0692288b799eba6d4c9cea8a9e81b62e7e2f16f4f WatchSource:0}: Error finding container cd04abe9946779742eed56c0692288b799eba6d4c9cea8a9e81b62e7e2f16f4f: Status 404 returned error can't find the container with id cd04abe9946779742eed56c0692288b799eba6d4c9cea8a9e81b62e7e2f16f4f Oct 08 07:19:16 crc kubenswrapper[4693]: W1008 07:19:16.550493 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb09f9b2_b06b_462e_a750_077bd093f03f.slice/crio-aef0c5cd4392ce203a6d4f651e813333d69f32293319054124387f0e0693bcd1 WatchSource:0}: Error finding container aef0c5cd4392ce203a6d4f651e813333d69f32293319054124387f0e0693bcd1: Status 404 returned error can't find the container with id aef0c5cd4392ce203a6d4f651e813333d69f32293319054124387f0e0693bcd1 Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.557272 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:16 crc kubenswrapper[4693]: E1008 07:19:16.557624 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.057582756 +0000 UTC m=+142.428547691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.557644 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.558561 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: E1008 07:19:16.558986 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.058977854 +0000 UTC m=+142.429942789 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.570744 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcklt\" (UniqueName: \"kubernetes.io/projected/e414e1c2-aeac-4bb5-9061-07c81c6a4630-kube-api-access-xcklt\") pod \"machine-config-controller-84d6567774-tk68c\" (UID: \"e414e1c2-aeac-4bb5-9061-07c81c6a4630\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.576452 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.584187 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/742a75c4-cb63-480a-961e-c6adaa5835f6-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-k5p8c\" (UID: \"742a75c4-cb63-480a-961e-c6adaa5835f6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.596534 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.605951 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.611070 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7768\" (UniqueName: \"kubernetes.io/projected/c90da226-e34a-4d72-a64d-132a45439e4d-kube-api-access-t7768\") pod \"console-f9d7485db-x7lvf\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") " pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.614372 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.623567 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.627480 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f9aa9747-0f32-4924-a6f3-ffa16180c7a4-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gsb9l\" (UID: \"f9aa9747-0f32-4924-a6f3-ffa16180c7a4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.650255 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fs7mc\" (UniqueName: \"kubernetes.io/projected/3629c487-2222-4044-a9cd-934694064ec5-kube-api-access-fs7mc\") pod \"service-ca-9c57cc56f-9rsfp\" (UID: \"3629c487-2222-4044-a9cd-934694064ec5\") " pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.659514 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:16 crc kubenswrapper[4693]: E1008 07:19:16.659952 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.159936752 +0000 UTC m=+142.530901687 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.668151 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.671007 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br8l5\" (UniqueName: \"kubernetes.io/projected/c120941a-2403-4c12-8383-892fef07e53b-kube-api-access-br8l5\") pod \"dns-operator-744455d44c-gxwm2\" (UID: \"c120941a-2403-4c12-8383-892fef07e53b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gxwm2" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.683482 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.697756 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzwfz\" (UniqueName: \"kubernetes.io/projected/4bd3780a-bf97-4d18-a493-7ae54cf02750-kube-api-access-lzwfz\") pod \"migrator-59844c95c7-mt9pc\" (UID: \"4bd3780a-bf97-4d18-a493-7ae54cf02750\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mt9pc" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.730923 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkdvk\" (UniqueName: \"kubernetes.io/projected/c2d8737c-16dd-429e-a6e0-3d2c35877083-kube-api-access-tkdvk\") pod \"control-plane-machine-set-operator-78cbb6b69f-9t8ns\" (UID: \"c2d8737c-16dd-429e-a6e0-3d2c35877083\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.749290 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6srr\" (UniqueName: \"kubernetes.io/projected/db2f35c9-5b39-4db5-9ec8-052cff6b8b8b-kube-api-access-t6srr\") pod \"packageserver-d55dfcdfc-n2md8\" (UID: \"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.761801 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: E1008 07:19:16.762321 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.262298298 +0000 UTC m=+142.633263243 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.768124 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4mbl\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-kube-api-access-v4mbl\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.786008 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5kwk\" (UniqueName: \"kubernetes.io/projected/2840730a-aa51-4168-84e9-9db9b8d136a1-kube-api-access-k5kwk\") pod \"multus-admission-controller-857f4d67dd-qkgxg\" (UID: \"2840730a-aa51-4168-84e9-9db9b8d136a1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qkgxg" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.805145 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-bound-sa-token\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.807068 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-gxwm2" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.816085 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.829961 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.840144 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc5st\" (UniqueName: \"kubernetes.io/projected/a9d1eea1-3570-4937-a89b-5c6d87551b30-kube-api-access-rc5st\") pod \"marketplace-operator-79b997595-97z2l\" (UID: \"a9d1eea1-3570-4937-a89b-5c6d87551b30\") " pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.849306 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.850212 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pjgt\" (UniqueName: \"kubernetes.io/projected/4fa30d38-cd25-470f-9195-b2cc226f3b7b-kube-api-access-6pjgt\") pod \"ingress-canary-787bm\" (UID: \"4fa30d38-cd25-470f-9195-b2cc226f3b7b\") " pod="openshift-ingress-canary/ingress-canary-787bm" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.861433 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.862901 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:16 crc kubenswrapper[4693]: E1008 07:19:16.863365 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.363344138 +0000 UTC m=+142.734309073 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.868554 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xlch\" (UniqueName: \"kubernetes.io/projected/1b656307-4735-491e-9c35-107eb4bc04a8-kube-api-access-8xlch\") pod \"machine-config-server-s4gbb\" (UID: \"1b656307-4735-491e-9c35-107eb4bc04a8\") " pod="openshift-machine-config-operator/machine-config-server-s4gbb" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.885259 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.886619 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp"] Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.890254 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8stf\" (UniqueName: \"kubernetes.io/projected/4f00a0d4-1d78-4ad4-808c-373fb017fe95-kube-api-access-l8stf\") pod \"csi-hostpathplugin-227k8\" (UID: \"4f00a0d4-1d78-4ad4-808c-373fb017fe95\") " pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.929126 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq56z\" (UniqueName: \"kubernetes.io/projected/0e338d6f-e361-4a3d-85cb-6c1f100270f2-kube-api-access-mq56z\") pod \"package-server-manager-789f6589d5-kk6x6\" (UID: \"0e338d6f-e361-4a3d-85cb-6c1f100270f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.932213 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mt9pc" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.941999 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.945219 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvdn6\" (UniqueName: \"kubernetes.io/projected/dafbb968-6b8e-4aea-94eb-aecfc938ecf0-kube-api-access-gvdn6\") pod \"dns-default-sgmv4\" (UID: \"dafbb968-6b8e-4aea-94eb-aecfc938ecf0\") " pod="openshift-dns/dns-default-sgmv4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.951930 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzg27\" (UniqueName: \"kubernetes.io/projected/cec7db18-2326-4d53-80e6-abce8210a82e-kube-api-access-nzg27\") pod \"service-ca-operator-777779d784-cs9f8\" (UID: \"cec7db18-2326-4d53-80e6-abce8210a82e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.955515 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qkgxg" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.964482 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:16 crc kubenswrapper[4693]: E1008 07:19:16.964884 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.464871651 +0000 UTC m=+142.835836586 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.975962 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzc7x\" (UniqueName: \"kubernetes.io/projected/045e2499-3e6f-4ea0-8036-ba25d897c4da-kube-api-access-pzc7x\") pod \"collect-profiles-29331795-kwbx4\" (UID: \"045e2499-3e6f-4ea0-8036-ba25d897c4da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.989483 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7g45\" (UniqueName: \"kubernetes.io/projected/9073bf89-0d7f-42f0-b1c2-493584fd64ee-kube-api-access-z7g45\") pod \"catalog-operator-68c6474976-ppwh6\" (UID: \"9073bf89-0d7f-42f0-b1c2-493584fd64ee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" Oct 08 07:19:16 crc kubenswrapper[4693]: W1008 07:19:16.991720 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod926d3e66_e18c_4e73_830c_59e0d75071c7.slice/crio-bad04d8aca2828965a1e54a27c0417f951fdd14c23b4d5c13a4429d9857e202a WatchSource:0}: Error finding container bad04d8aca2828965a1e54a27c0417f951fdd14c23b4d5c13a4429d9857e202a: Status 404 returned error can't find the container with id bad04d8aca2828965a1e54a27c0417f951fdd14c23b4d5c13a4429d9857e202a Oct 08 07:19:16 crc kubenswrapper[4693]: W1008 07:19:16.998029 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30132782_7481_4ce8_957f_759ea66a4eaf.slice/crio-ccbaf0f3bc03183d46a3c192019c1608541446072d1a76ecf08235e9342b12a9 WatchSource:0}: Error finding container ccbaf0f3bc03183d46a3c192019c1608541446072d1a76ecf08235e9342b12a9: Status 404 returned error can't find the container with id ccbaf0f3bc03183d46a3c192019c1608541446072d1a76ecf08235e9342b12a9 Oct 08 07:19:16 crc kubenswrapper[4693]: I1008 07:19:16.999880 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.014912 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.035759 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.045712 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6" Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.060221 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.066081 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.066266 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.56624089 +0000 UTC m=+142.937205825 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.066323 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.066729 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.566714623 +0000 UTC m=+142.937679558 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.075008 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-sgmv4" Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.099408 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-227k8" Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.111734 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-787bm" Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.123262 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-s4gbb" Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.169137 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.169451 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.66943427 +0000 UTC m=+143.040399205 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.188144 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq"] Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.218185 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5"] Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.272002 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.272309 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.77229682 +0000 UTC m=+143.143261755 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.280646 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts" event={"ID":"82ab8b8f-7656-4d0f-9829-d222cd26b9aa","Type":"ContainerStarted","Data":"ceee85dbb043a47ebe9a7412456d54571f31699c14a64c9ce40dff138cd5aecb"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.280690 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts" event={"ID":"82ab8b8f-7656-4d0f-9829-d222cd26b9aa","Type":"ContainerStarted","Data":"83d4f6f62e3f9754088b4f3fbce10c2cdb6f081db57f638d73ed92b5fab52779"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.282049 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-blq4r" event={"ID":"30132782-7481-4ce8-957f-759ea66a4eaf","Type":"ContainerStarted","Data":"ccbaf0f3bc03183d46a3c192019c1608541446072d1a76ecf08235e9342b12a9"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.284252 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-pwbtw" event={"ID":"e779da98-f489-4eac-9633-857e35f9d68a","Type":"ContainerStarted","Data":"f751865e3ce1415dcd91b83001e7e1e930a8899ce3532884b39cd40732917a40"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.284309 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-pwbtw" event={"ID":"e779da98-f489-4eac-9633-857e35f9d68a","Type":"ContainerStarted","Data":"af79c4f34420da78d4f3f70acced24b105948d7db3eb882e8c56fd1fa5e8bbcf"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.285455 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg" event={"ID":"325ef07c-02ef-45c3-9fc4-ecfe80adbbfe","Type":"ContainerStarted","Data":"6fd3285717ca6258532f44f985eda7d1cfd3bcb8057739e635c90be7d605e2fc"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.285483 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg" event={"ID":"325ef07c-02ef-45c3-9fc4-ecfe80adbbfe","Type":"ContainerStarted","Data":"1b56bdb9539f3c5e934b4b5f31740aac15fb8dfa6c6a0f7582fec2c7cd883a6a"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.286364 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq" event={"ID":"cfd35b00-963c-445b-8a48-a72ecdce0875","Type":"ContainerStarted","Data":"c9a261e1a2f0c2c370315b317e5a91faf61d4eb130e471738f905d5d6c7b805c"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.286389 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq" 
event={"ID":"cfd35b00-963c-445b-8a48-a72ecdce0875","Type":"ContainerStarted","Data":"92901617740e4c872439149e8d2b3396eca83b9c0596c7be7cd18478ed571873"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.287931 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t484z" event={"ID":"5a760e51-6b0f-431c-ba90-99416b3f215a","Type":"ContainerStarted","Data":"2a767212db435edbaaf23ed0c8e84c8a09341ec79b21dbb80b04d58241ea23d4"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.289242 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" event={"ID":"927ac9fe-c982-487c-8258-e137f2ba8cdb","Type":"ContainerStarted","Data":"cd04abe9946779742eed56c0692288b799eba6d4c9cea8a9e81b62e7e2f16f4f"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.290252 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w" event={"ID":"0e7238a2-53fc-41ac-84a2-7000d86b9ceb","Type":"ContainerStarted","Data":"d7186b28a09975feb2ba9bc6b99e93ef1ae63fa6d6479b4452fa34df8f6248e4"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.291682 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4" event={"ID":"abbbb752-3f68-4da0-b2d5-a6962f283a32","Type":"ContainerStarted","Data":"536abf71ab98dd604c0aadce352361604ec73a8df94b27569554fc8d719a4645"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.291886 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4" event={"ID":"abbbb752-3f68-4da0-b2d5-a6962f283a32","Type":"ContainerStarted","Data":"195afea31139b40f5cb0bcd4f0ffee385fb61a8a4ecddc6cf5416cdeaa1db8d0"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.294483 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-ss8kz" event={"ID":"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08","Type":"ContainerStarted","Data":"63390d29eb7f5d533588a7f8984c72e24b5af8bbdf8a03a003aad4b486ad2b99"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.294546 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-ss8kz" event={"ID":"49ba6508-c23e-4a06-aa9c-eb38fa4e8c08","Type":"ContainerStarted","Data":"172398e00b3fc49d65dc31718139a7be8cfd9a75fc0777e47ba6e8bfa4206287"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.295730 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" event={"ID":"f13d519d-a936-4e80-b8a3-f1946cb85ac3","Type":"ContainerStarted","Data":"90819c8ed98934fd3ee65a023f5cb47f1de1183f15345a5c50e5abfca26376d2"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.297441 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" event={"ID":"c3089122-f24c-493c-ad56-eaa062b4937a","Type":"ContainerStarted","Data":"ba75a6cf0c8185bb4b3012081bfd2a8876c5eea6237a86948506829ed35c159c"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.299365 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq" 
event={"ID":"e2d82dd2-84a7-49e3-8704-31ff0e0dea1a","Type":"ContainerStarted","Data":"8078817ab064fa0c479c144fcd8c689d27182ab510d77d3855e8cbba5d9b4f5f"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.301347 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" event={"ID":"362f8698-0eba-4e0b-b11d-ca82cc479688","Type":"ContainerStarted","Data":"beabd155d41c510265d4c14b808ecdcad6135c81fa3ddd2e00cb95b208c04fdc"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.303609 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" event={"ID":"db09f9b2-b06b-462e-a750-077bd093f03f","Type":"ContainerStarted","Data":"aef0c5cd4392ce203a6d4f651e813333d69f32293319054124387f0e0693bcd1"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.304663 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" event={"ID":"4b2104bc-e24a-4bcc-b312-53374a90564c","Type":"ContainerStarted","Data":"1315c324c7e34e455b5d42d96961114f0ea5fab301b1ecde0f89808c761349af"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.305834 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" event={"ID":"926d3e66-e18c-4e73-830c-59e0d75071c7","Type":"ContainerStarted","Data":"bad04d8aca2828965a1e54a27c0417f951fdd14c23b4d5c13a4429d9857e202a"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.308288 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" event={"ID":"e9a6efb0-34a1-4419-a097-14877cb1371c","Type":"ContainerStarted","Data":"024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f"} Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.308783 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.317491 4693 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-rjk9l container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.7:6443/healthz\": dial tcp 10.217.0.7:6443: connect: connection refused" start-of-body= Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.317564 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" podUID="e9a6efb0-34a1-4419-a097-14877cb1371c" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.7:6443/healthz\": dial tcp 10.217.0.7:6443: connect: connection refused" Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.334436 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-gxwm2"] Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.373309 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.373493 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.873461383 +0000 UTC m=+143.244426318 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.373875 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.376122 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.876108466 +0000 UTC m=+143.247073501 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.450139 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c"] Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.479138 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.479376 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.979341196 +0000 UTC m=+143.350306151 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.479547 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.480132 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:17.980122728 +0000 UTC m=+143.351087763 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.580237 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.580327 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-mt9pc"] Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.580554 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.08052587 +0000 UTC m=+143.451490805 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.580710 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.581105 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.081090786 +0000 UTC m=+143.452055721 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: W1008 07:19:17.615545 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc7eb66c1_f2b2_4116_9b68_6ffec11fb8bd.slice/crio-04ba022a304e9d0eecd2beb023bb89dab2a1212a7b5305a9605ada3114e210ba WatchSource:0}: Error finding container 04ba022a304e9d0eecd2beb023bb89dab2a1212a7b5305a9605ada3114e210ba: Status 404 returned error can't find the container with id 04ba022a304e9d0eecd2beb023bb89dab2a1212a7b5305a9605ada3114e210ba Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.682570 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.682902 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.182883936 +0000 UTC m=+143.553848871 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.682944 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.683365 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.18335848 +0000 UTC m=+143.554323415 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.783705 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.784565 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.284549144 +0000 UTC m=+143.655514079 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.849231 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-x7lvf"] Oct 08 07:19:17 crc kubenswrapper[4693]: I1008 07:19:17.886437 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:17 crc kubenswrapper[4693]: E1008 07:19:17.887597 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.387577178 +0000 UTC m=+143.758542183 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.000368 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:18 crc kubenswrapper[4693]: E1008 07:19:18.001232 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.501199915 +0000 UTC m=+143.872164850 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.001310 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:18 crc kubenswrapper[4693]: E1008 07:19:18.004798 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.504779964 +0000 UTC m=+143.875744899 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.034469 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-dh66b" podStartSLOduration=122.034446473 podStartE2EDuration="2m2.034446473s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:17.993839762 +0000 UTC m=+143.364804697" watchObservedRunningTime="2025-10-08 07:19:18.034446473 +0000 UTC m=+143.405411408" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.073775 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c"] Oct 08 07:19:18 crc kubenswrapper[4693]: W1008 07:19:18.110307 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b656307_4735_491e_9c35_107eb4bc04a8.slice/crio-eef5829b4bd678a7ac2861031efe3e9a321e99cbbb8ab7ab110b47fd39c30ff3 WatchSource:0}: Error finding container eef5829b4bd678a7ac2861031efe3e9a321e99cbbb8ab7ab110b47fd39c30ff3: Status 404 returned error can't find the container with id eef5829b4bd678a7ac2861031efe3e9a321e99cbbb8ab7ab110b47fd39c30ff3 Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.124026 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:18 crc kubenswrapper[4693]: E1008 07:19:18.124247 4693 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.624217072 +0000 UTC m=+143.995182007 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.124438 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:18 crc kubenswrapper[4693]: E1008 07:19:18.125063 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.625045095 +0000 UTC m=+143.996010030 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.227184 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:18 crc kubenswrapper[4693]: E1008 07:19:18.227531 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.727516444 +0000 UTC m=+144.098481379 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.329431 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:18 crc kubenswrapper[4693]: E1008 07:19:18.329962 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.829944672 +0000 UTC m=+144.200909607 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.332447 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" event={"ID":"db09f9b2-b06b-462e-a750-077bd093f03f","Type":"ContainerStarted","Data":"5609e0248e1a5da41dbf646593158d556aa83b3036bd294b470acff9ea605721"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.338908 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" event={"ID":"f13d519d-a936-4e80-b8a3-f1946cb85ac3","Type":"ContainerStarted","Data":"e4832cc2a81e3d152e57b123ee039d8adf7a9972d8e042cf4f83d7606733ac18"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.339303 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.350981 4693 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-ndznc container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.351069 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" podUID="f13d519d-a936-4e80-b8a3-f1946cb85ac3" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.360166 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mt9pc" event={"ID":"4bd3780a-bf97-4d18-a493-7ae54cf02750","Type":"ContainerStarted","Data":"87b85eb9ed482d113678905bbcaa2343fcb042bb545e8abc8243fe471e603c90"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.368487 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" event={"ID":"927ac9fe-c982-487c-8258-e137f2ba8cdb","Type":"ContainerStarted","Data":"1e14bca08628a73687d55ffca86c228c41e8dbad53c9884f9e69eaf887607e04"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.370393 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.371742 4693 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-pcr5x container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.371783 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" podUID="927ac9fe-c982-487c-8258-e137f2ba8cdb" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.372268 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l"] Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.377705 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" event={"ID":"362f8698-0eba-4e0b-b11d-ca82cc479688","Type":"ContainerStarted","Data":"ccb24a590408776a1d017608a716f9a409674db3e8630cb9bbeda791be1f1153"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.392078 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-s4gbb" event={"ID":"1b656307-4735-491e-9c35-107eb4bc04a8","Type":"ContainerStarted","Data":"eef5829b4bd678a7ac2861031efe3e9a321e99cbbb8ab7ab110b47fd39c30ff3"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.433363 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.435516 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" event={"ID":"c9e8b78a-82c5-422c-b105-d7169a771c3e","Type":"ContainerStarted","Data":"a872e97bc9f3f16bf5ce19a7a88513e22e0de12af0fc82804dd3d6f6c8fbf07f"} Oct 08 07:19:18 crc kubenswrapper[4693]: E1008 07:19:18.436683 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:18.936663489 +0000 UTC m=+144.307628424 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.475034 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x7lvf" event={"ID":"c90da226-e34a-4d72-a64d-132a45439e4d","Type":"ContainerStarted","Data":"984945048f8e776ff956da2e08d166df4c94f62976d0ab2aa8b140bb196a7461"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.486614 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-blq4r" event={"ID":"30132782-7481-4ce8-957f-759ea66a4eaf","Type":"ContainerStarted","Data":"e5cbc95afe43a6a343ec1fa4f7a3dd48e44e6d1d6eb9be9a1e0e6e6600380bbd"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.491041 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w" event={"ID":"0e7238a2-53fc-41ac-84a2-7000d86b9ceb","Type":"ContainerStarted","Data":"4419c596c9339cf06ec93e5698c53e6fa44cd2184c3dc9e6de6fd2c3ffd86743"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.497645 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" event={"ID":"c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd","Type":"ContainerStarted","Data":"04ba022a304e9d0eecd2beb023bb89dab2a1212a7b5305a9605ada3114e210ba"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.502165 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" event={"ID":"e414e1c2-aeac-4bb5-9061-07c81c6a4630","Type":"ContainerStarted","Data":"bade38e086b3d738b0f6b20282b36f5b25d8e3eec781ba167bded88ae82c13b4"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.522575 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" event={"ID":"c3089122-f24c-493c-ad56-eaa062b4937a","Type":"ContainerStarted","Data":"c8936d157d8032a64c5b5e65b31f4a7e36861bc9711bb2e106ae925e3ef48305"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.534703 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:18 crc kubenswrapper[4693]: E1008 07:19:18.535107 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:19.035085557 +0000 UTC m=+144.406050492 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.535936 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts" event={"ID":"82ab8b8f-7656-4d0f-9829-d222cd26b9aa","Type":"ContainerStarted","Data":"32e0febaf3bc974d32d51822e3e8f0da6b5e973c397f5848a9f9b20275ad6c41"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.543883 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" event={"ID":"742a75c4-cb63-480a-961e-c6adaa5835f6","Type":"ContainerStarted","Data":"93b9d90f80d77b87ca309c9d0f486b52d0d336146a37087154d9025de0a43866"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.554143 4693 generic.go:334] "Generic (PLEG): container finished" podID="cfd35b00-963c-445b-8a48-a72ecdce0875" containerID="c9a261e1a2f0c2c370315b317e5a91faf61d4eb130e471738f905d5d6c7b805c" exitCode=0 Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.554717 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq" event={"ID":"cfd35b00-963c-445b-8a48-a72ecdce0875","Type":"ContainerDied","Data":"c9a261e1a2f0c2c370315b317e5a91faf61d4eb130e471738f905d5d6c7b805c"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.559299 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-gxwm2" event={"ID":"c120941a-2403-4c12-8383-892fef07e53b","Type":"ContainerStarted","Data":"6ce97fa708fc9e7b94b98c3bd7b2c7318df75eedabd83e08466f60789da99170"} Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.560005 4693 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-rjk9l container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.7:6443/healthz\": dial tcp 10.217.0.7:6443: connect: connection refused" start-of-body= Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.560110 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" podUID="e9a6efb0-34a1-4419-a097-14877cb1371c" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.7:6443/healthz\": dial tcp 10.217.0.7:6443: connect: connection refused" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.560369 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-pwbtw" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.560489 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-ss8kz" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.587748 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-blq4r" podStartSLOduration=122.58772878 podStartE2EDuration="2m2.58772878s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:18.581631802 +0000 UTC m=+143.952596737" watchObservedRunningTime="2025-10-08 07:19:18.58772878 +0000 UTC m=+143.958693715" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.600071 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.629428 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" podStartSLOduration=123.629411761 podStartE2EDuration="2m3.629411761s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:18.627089777 +0000 UTC m=+143.998054712" watchObservedRunningTime="2025-10-08 07:19:18.629411761 +0000 UTC m=+144.000376696" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.637733 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:18 crc kubenswrapper[4693]: E1008 07:19:18.640112 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:19.140093486 +0000 UTC m=+144.511058421 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.671960 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" podStartSLOduration=122.671941555 podStartE2EDuration="2m2.671941555s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:18.665975881 +0000 UTC m=+144.036940826" watchObservedRunningTime="2025-10-08 07:19:18.671941555 +0000 UTC m=+144.042906490" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.677715 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.677809 4693 patch_prober.go:28] interesting pod/console-operator-58897d9998-ss8kz container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.677924 4693 patch_prober.go:28] interesting pod/downloads-7954f5f757-pwbtw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.677942 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-pwbtw" podUID="e779da98-f489-4eac-9633-857e35f9d68a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.677931 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-ss8kz" podUID="49ba6508-c23e-4a06-aa9c-eb38fa4e8c08" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.677856 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.723126 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-29j9r" podStartSLOduration=123.723104918 podStartE2EDuration="2m3.723104918s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:18.720393783 +0000 UTC m=+144.091358718" watchObservedRunningTime="2025-10-08 07:19:18.723104918 +0000 UTC m=+144.094069853" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.748923 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:18 crc kubenswrapper[4693]: E1008 07:19:18.749401 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:19.249382714 +0000 UTC m=+144.620347649 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.792949 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f62ts" podStartSLOduration=123.792921365 podStartE2EDuration="2m3.792921365s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:18.762471694 +0000 UTC m=+144.133436629" watchObservedRunningTime="2025-10-08 07:19:18.792921365 +0000 UTC m=+144.163886290" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.802548 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns"] Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.803505 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lmzcq" podStartSLOduration=123.803479426 podStartE2EDuration="2m3.803479426s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:18.793986504 +0000 UTC m=+144.164951439" watchObservedRunningTime="2025-10-08 07:19:18.803479426 +0000 UTC m=+144.174444361" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.849524 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:18 crc kubenswrapper[4693]: E1008 07:19:18.850011 4693 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:19.349994801 +0000 UTC m=+144.720959736 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.883921 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-sxz2w" podStartSLOduration=123.883895767 podStartE2EDuration="2m3.883895767s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:18.874324702 +0000 UTC m=+144.245289657" watchObservedRunningTime="2025-10-08 07:19:18.883895767 +0000 UTC m=+144.254860702" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.909994 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" podStartSLOduration=123.909973547 podStartE2EDuration="2m3.909973547s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:18.909282948 +0000 UTC m=+144.280247883" watchObservedRunningTime="2025-10-08 07:19:18.909973547 +0000 UTC m=+144.280938482" Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.950866 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6"] Oct 08 07:19:18 crc kubenswrapper[4693]: I1008 07:19:18.952113 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:18 crc kubenswrapper[4693]: E1008 07:19:18.952688 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:19.452676106 +0000 UTC m=+144.823641041 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.025090 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-4tc6b" podStartSLOduration=124.025067224 podStartE2EDuration="2m4.025067224s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:18.96152162 +0000 UTC m=+144.332486555" watchObservedRunningTime="2025-10-08 07:19:19.025067224 +0000 UTC m=+144.396032149" Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.053751 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:19 crc kubenswrapper[4693]: E1008 07:19:19.054284 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:19.554210469 +0000 UTC m=+144.925175404 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.054673 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:19 crc kubenswrapper[4693]: E1008 07:19:19.055125 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:19.555111964 +0000 UTC m=+144.926076899 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.056120 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4"] Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.078082 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-pwbtw" podStartSLOduration=124.078056068 podStartE2EDuration="2m4.078056068s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:19.074023046 +0000 UTC m=+144.444987981" watchObservedRunningTime="2025-10-08 07:19:19.078056068 +0000 UTC m=+144.449021003" Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.104349 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-227k8"] Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.117773 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8"] Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.133640 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cnxrg" podStartSLOduration=123.133618112 podStartE2EDuration="2m3.133618112s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:19.133076377 +0000 UTC m=+144.504041312" watchObservedRunningTime="2025-10-08 07:19:19.133618112 +0000 UTC m=+144.504583047" Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.156572 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:19 crc kubenswrapper[4693]: E1008 07:19:19.157013 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:19.656992427 +0000 UTC m=+145.027957362 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.219679 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-ss8kz" podStartSLOduration=124.219654747 podStartE2EDuration="2m4.219654747s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:19.174181432 +0000 UTC m=+144.545146367" watchObservedRunningTime="2025-10-08 07:19:19.219654747 +0000 UTC m=+144.590619682" Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.258253 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:19 crc kubenswrapper[4693]: E1008 07:19:19.258658 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:19.758639324 +0000 UTC m=+145.129604259 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.270093 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g6md4" podStartSLOduration=124.270069459 podStartE2EDuration="2m4.270069459s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:19.223788421 +0000 UTC m=+144.594753376" watchObservedRunningTime="2025-10-08 07:19:19.270069459 +0000 UTC m=+144.641034394" Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.271317 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qkgxg"] Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.275886 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-9rsfp"] Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.287551 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-97z2l"] Oct 08 07:19:19 crc kubenswrapper[4693]: W1008 07:19:19.353045 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9d1eea1_3570_4937_a89b_5c6d87551b30.slice/crio-887f00b338307c85ed0512b5ba6302bbefc183495b2dec8ad0b6f9e32c872783 WatchSource:0}: Error finding container 887f00b338307c85ed0512b5ba6302bbefc183495b2dec8ad0b6f9e32c872783: Status 404 returned error can't find the container with id 887f00b338307c85ed0512b5ba6302bbefc183495b2dec8ad0b6f9e32c872783 Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.362150 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:19 crc kubenswrapper[4693]: E1008 07:19:19.362623 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:19.862602574 +0000 UTC m=+145.233567509 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.374170 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6"]
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.406966 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4"]
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.414242 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8"]
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.425759 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-787bm"]
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.426882 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-sgmv4"]
Oct 08 07:19:19 crc kubenswrapper[4693]: W1008 07:19:19.448331 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod045e2499_3e6f_4ea0_8036_ba25d897c4da.slice/crio-121ae298b134c8179e4e33806b6f1c67322b5c6adfddd101b60bc8897fdebf21 WatchSource:0}: Error finding container 121ae298b134c8179e4e33806b6f1c67322b5c6adfddd101b60bc8897fdebf21: Status 404 returned error can't find the container with id 121ae298b134c8179e4e33806b6f1c67322b5c6adfddd101b60bc8897fdebf21
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.464186 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:19 crc kubenswrapper[4693]: E1008 07:19:19.464630 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:19.964614961 +0000 UTC m=+145.335579896 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.566598 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:19 crc kubenswrapper[4693]: E1008 07:19:19.567026 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:20.066983897 +0000 UTC m=+145.437948832 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.588099 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" event={"ID":"9073bf89-0d7f-42f0-b1c2-493584fd64ee","Type":"ContainerStarted","Data":"a327b1d4b611904e4ca570cb7ab2629afaabbaeaa27e4e70acbd36909680f026"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.588157 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" event={"ID":"9073bf89-0d7f-42f0-b1c2-493584fd64ee","Type":"ContainerStarted","Data":"c5e1598998afd65b26741bc47c1de97e3795d2d2457a96967cde0a940ba1a22b"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.588688 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6"
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.590668 4693 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-ppwh6 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body=
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.590715 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" podUID="9073bf89-0d7f-42f0-b1c2-493584fd64ee" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused"
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.621131 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 08 07:19:19 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld
Oct 08 07:19:19 crc kubenswrapper[4693]: [+]process-running ok
Oct 08 07:19:19 crc kubenswrapper[4693]: healthz check failed
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.621935 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.632154 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qkgxg" event={"ID":"2840730a-aa51-4168-84e9-9db9b8d136a1","Type":"ContainerStarted","Data":"9be7b874fa5e4897265ea0149a8cdeedaba95221457a1ea3a640c23a3a88195b"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.634227 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" podStartSLOduration=123.634215154 podStartE2EDuration="2m3.634215154s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:19.63262758 +0000 UTC m=+145.003592515" watchObservedRunningTime="2025-10-08 07:19:19.634215154 +0000 UTC m=+145.005180079"
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.651788 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-227k8" event={"ID":"4f00a0d4-1d78-4ad4-808c-373fb017fe95","Type":"ContainerStarted","Data":"b57dab8b056a02ffdc1948b5ce1b132b4729dda83bb7deeee7aca2dff2711520"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.670356 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.673518 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" event={"ID":"3629c487-2222-4044-a9cd-934694064ec5","Type":"ContainerStarted","Data":"d74fffd71a83f99ddfe5cc351f7aa7ba9edb20d1e5fe4d1edede7c1db43995c5"}
Oct 08 07:19:19 crc kubenswrapper[4693]: E1008 07:19:19.674974 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:20.174950428 +0000 UTC m=+145.545915563 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.675508 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" event={"ID":"045e2499-3e6f-4ea0-8036-ba25d897c4da","Type":"ContainerStarted","Data":"121ae298b134c8179e4e33806b6f1c67322b5c6adfddd101b60bc8897fdebf21"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.676225 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" event={"ID":"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b","Type":"ContainerStarted","Data":"99d89108d00c9cff2d7d026ecb914c7fab36879084f9442779d569e15e13218c"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.716916 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" event={"ID":"e414e1c2-aeac-4bb5-9061-07c81c6a4630","Type":"ContainerStarted","Data":"56dfa2e0ca428747e8b56b926ad9330e1a64144f03cb851334d5d725b30a37ac"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.716992 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" event={"ID":"e414e1c2-aeac-4bb5-9061-07c81c6a4630","Type":"ContainerStarted","Data":"b8a22914c0fdc0721b000ffc5eb31d1ebafa8697631c6cb1b31b8ed2effef787"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.727403 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" event={"ID":"f9aa9747-0f32-4924-a6f3-ffa16180c7a4","Type":"ContainerStarted","Data":"1f9edda1b38f2d680d5b9c119c04b9878fbf2cb4edfbb926065b285934c52ae7"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.727464 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" event={"ID":"f9aa9747-0f32-4924-a6f3-ffa16180c7a4","Type":"ContainerStarted","Data":"1220418b607a4129c5061254b0e0edf413c399ec51ea8057eacaede9088049fb"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.773290 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:19 crc kubenswrapper[4693]: E1008 07:19:19.776731 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:20.276703968 +0000 UTC m=+145.647668903 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.786668 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk68c" podStartSLOduration=123.786647732 podStartE2EDuration="2m3.786647732s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:19.748855889 +0000 UTC m=+145.119820834" watchObservedRunningTime="2025-10-08 07:19:19.786647732 +0000 UTC m=+145.157612667"
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.797774 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" event={"ID":"c9e8b78a-82c5-422c-b105-d7169a771c3e","Type":"ContainerStarted","Data":"32f90d57be7b876e1d14ed3eacd25d2c74cb6dba096f7423e389e46233fc6779"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.797836 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" event={"ID":"c9e8b78a-82c5-422c-b105-d7169a771c3e","Type":"ContainerStarted","Data":"1dc0e372436aeb5de9313c990b852da39eb83296766d33806eccdabbe040919e"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.816216 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-s4gbb" event={"ID":"1b656307-4735-491e-9c35-107eb4bc04a8","Type":"ContainerStarted","Data":"e8e83d0f6d65a779b1b0f0392739931df410b24a71e048ad0dd6fae105477dc9"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.852379 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gsb9l" podStartSLOduration=124.852358067 podStartE2EDuration="2m4.852358067s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:19.80395395 +0000 UTC m=+145.174918885" watchObservedRunningTime="2025-10-08 07:19:19.852358067 +0000 UTC m=+145.223323002"
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.853350 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq" event={"ID":"cfd35b00-963c-445b-8a48-a72ecdce0875","Type":"ContainerStarted","Data":"2a63eca59c0e5c865b9913ed7d10551216915386cbf8fb747d2340740005ba1f"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.854278 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq"
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.867218 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6" event={"ID":"0e338d6f-e361-4a3d-85cb-6c1f100270f2","Type":"ContainerStarted","Data":"f7b8cf4ce087a77a9f76887adb410457108871526e83022ad9aa31bfecb5aa28"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.874776 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:19 crc kubenswrapper[4693]: E1008 07:19:19.879077 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:20.379059754 +0000 UTC m=+145.750024689 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.898652 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2dtg5" podStartSLOduration=123.898634075 podStartE2EDuration="2m3.898634075s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:19.855075572 +0000 UTC m=+145.226040507" watchObservedRunningTime="2025-10-08 07:19:19.898634075 +0000 UTC m=+145.269599010"
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.910790 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" event={"ID":"4b2104bc-e24a-4bcc-b312-53374a90564c","Type":"ContainerStarted","Data":"70b9cdfb65df36a5826fdfc67c85f5abcc85bff21bac155bddbed422d278123b"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.921847 4693 generic.go:334] "Generic (PLEG): container finished" podID="926d3e66-e18c-4e73-830c-59e0d75071c7" containerID="16533a1d5f585f13ee58a4053df679c711b382ed908c1e6cd24824541d8e40d0" exitCode=0
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.921954 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" event={"ID":"926d3e66-e18c-4e73-830c-59e0d75071c7","Type":"ContainerDied","Data":"16533a1d5f585f13ee58a4053df679c711b382ed908c1e6cd24824541d8e40d0"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.926468 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" event={"ID":"362f8698-0eba-4e0b-b11d-ca82cc479688","Type":"ContainerStarted","Data":"21d897c500fe06096029650bce09fc29afe885dddc7011614365acb6ac625aab"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.940671 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq" podStartSLOduration=124.940637424 podStartE2EDuration="2m4.940637424s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:19.899945701 +0000 UTC m=+145.270910646" watchObservedRunningTime="2025-10-08 07:19:19.940637424 +0000 UTC m=+145.311602359"
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.959330 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mt9pc" event={"ID":"4bd3780a-bf97-4d18-a493-7ae54cf02750","Type":"ContainerStarted","Data":"d12981135a5a6c0a7513c2d39533f6b0492c762191bbc1f05ce942bf2a57106d"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.959381 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mt9pc" event={"ID":"4bd3780a-bf97-4d18-a493-7ae54cf02750","Type":"ContainerStarted","Data":"b9abe889e6febbf9780d98f8c73ec7fc7c05c36d566be0b10c3f8ca3550d0d2a"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.967243 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-s4gbb" podStartSLOduration=6.967225728 podStartE2EDuration="6.967225728s" podCreationTimestamp="2025-10-08 07:19:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:19.943479123 +0000 UTC m=+145.314444058" watchObservedRunningTime="2025-10-08 07:19:19.967225728 +0000 UTC m=+145.338190663"
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.968262 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7p2nf" podStartSLOduration=123.968256717 podStartE2EDuration="2m3.968256717s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:19.96621395 +0000 UTC m=+145.337178885" watchObservedRunningTime="2025-10-08 07:19:19.968256717 +0000 UTC m=+145.339221652"
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.981397 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" event={"ID":"742a75c4-cb63-480a-961e-c6adaa5835f6","Type":"ContainerStarted","Data":"7bae28ba8f6e3193f4f7ad3faa04940c52e3ba8ed40af95cf8303714ba4f841c"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.981730 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:19 crc kubenswrapper[4693]: E1008 07:19:19.982204 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:20.482175171 +0000 UTC m=+145.853140106 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.982533 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:19 crc kubenswrapper[4693]: E1008 07:19:19.982839 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:20.482830989 +0000 UTC m=+145.853795924 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.984314 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" event={"ID":"c7eb66c1-f2b2-4116-9b68-6ffec11fb8bd","Type":"ContainerStarted","Data":"c7d2fad81f95428df41d409471f7e8f5caa7bc3fbfdfd0eb1466ff988c33631d"}
Oct 08 07:19:19 crc kubenswrapper[4693]: I1008 07:19:19.988590 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-787bm" event={"ID":"4fa30d38-cd25-470f-9195-b2cc226f3b7b","Type":"ContainerStarted","Data":"7da92894fc89e2c775fd9d5b09cd9983dc8ce3b936b260b98f9e3b608fbd738a"}
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.004516 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m9s58" podStartSLOduration=125.004491787 podStartE2EDuration="2m5.004491787s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:20.003421638 +0000 UTC m=+145.374386573" watchObservedRunningTime="2025-10-08 07:19:20.004491787 +0000 UTC m=+145.375456722"
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.044522 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns" event={"ID":"c2d8737c-16dd-429e-a6e0-3d2c35877083","Type":"ContainerStarted","Data":"7e89eb04a322c9f4cc495314f2772528127af2b0c55f6d24234984073a388d72"}
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.044606 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns" event={"ID":"c2d8737c-16dd-429e-a6e0-3d2c35877083","Type":"ContainerStarted","Data":"ccaa55041395359fb14f61b8250d54efcbb7437237e6ca32cbf081eb6efa818e"}
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns" event={"ID":"c2d8737c-16dd-429e-a6e0-3d2c35877083","Type":"ContainerStarted","Data":"ccaa55041395359fb14f61b8250d54efcbb7437237e6ca32cbf081eb6efa818e"} Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.086568 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:20 crc kubenswrapper[4693]: E1008 07:19:20.088048 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:20.588029014 +0000 UTC m=+145.958993949 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.099815 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" event={"ID":"a9d1eea1-3570-4937-a89b-5c6d87551b30","Type":"ContainerStarted","Data":"887f00b338307c85ed0512b5ba6302bbefc183495b2dec8ad0b6f9e32c872783"} Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.100841 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.103962 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6gdq" podStartSLOduration=125.103949884 podStartE2EDuration="2m5.103949884s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:20.076982859 +0000 UTC m=+145.447947794" watchObservedRunningTime="2025-10-08 07:19:20.103949884 +0000 UTC m=+145.474914819" Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.104117 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mt9pc" podStartSLOduration=124.104112448 podStartE2EDuration="2m4.104112448s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:20.103212053 +0000 UTC m=+145.474176988" watchObservedRunningTime="2025-10-08 07:19:20.104112448 +0000 UTC m=+145.475077383" Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.114177 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sgmv4" event={"ID":"dafbb968-6b8e-4aea-94eb-aecfc938ecf0","Type":"ContainerStarted","Data":"46314b264d0f4ab2ec6a92fb476e8e9c9d29f2bd71657cceecc676a6c9f87e9a"} Oct 
08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.128968 4693 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-97z2l container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.129038 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" podUID="a9d1eea1-3570-4937-a89b-5c6d87551b30" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.132307 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-gxwm2" event={"ID":"c120941a-2403-4c12-8383-892fef07e53b","Type":"ContainerStarted","Data":"f0626df1bb8ac3b557886a947be400f33a210d9a38f0ff822fc4d543233d14bd"} Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.144478 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-k5p8c" podStartSLOduration=124.144460802 podStartE2EDuration="2m4.144460802s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:20.143130205 +0000 UTC m=+145.514095140" watchObservedRunningTime="2025-10-08 07:19:20.144460802 +0000 UTC m=+145.515425737" Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.164938 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9t8ns" podStartSLOduration=124.164920597 podStartE2EDuration="2m4.164920597s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:20.163383124 +0000 UTC m=+145.534348059" watchObservedRunningTime="2025-10-08 07:19:20.164920597 +0000 UTC m=+145.535885532" Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.181978 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" event={"ID":"cec7db18-2326-4d53-80e6-abce8210a82e","Type":"ContainerStarted","Data":"244eeba906d19fa5c3697a3ba3a4ef7ea25d3293ce8ee7a7c948f90168ee34da"} Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.183050 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" event={"ID":"02b76fdb-64e7-461a-906e-19cbda64cdc7","Type":"ContainerStarted","Data":"679f52cfed37de68794dbeef4d18025667aeb66f9a5daf1c2fbbd17127b84446"} Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.183073 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" event={"ID":"02b76fdb-64e7-461a-906e-19cbda64cdc7","Type":"ContainerStarted","Data":"9e9e13c9950cc0254b5ca95fc2aede5c2bb933d1316b80625990653925f16fa3"} Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.183979 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" Oct 08 07:19:20 crc 
kubenswrapper[4693]: I1008 07:19:20.185056 4693 generic.go:334] "Generic (PLEG): container finished" podID="5a760e51-6b0f-431c-ba90-99416b3f215a" containerID="b5a6291ee67be311bd2a2c3952ddff00d32fd915334b2a5e21a191e9cb11cc2f" exitCode=0 Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.185635 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t484z" event={"ID":"5a760e51-6b0f-431c-ba90-99416b3f215a","Type":"ContainerDied","Data":"b5a6291ee67be311bd2a2c3952ddff00d32fd915334b2a5e21a191e9cb11cc2f"} Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.189842 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:20 crc kubenswrapper[4693]: E1008 07:19:20.190228 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:20.690215305 +0000 UTC m=+146.061180230 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.202086 4693 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-cdzt4 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.202175 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" podUID="02b76fdb-64e7-461a-906e-19cbda64cdc7" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.209758 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x7lvf" event={"ID":"c90da226-e34a-4d72-a64d-132a45439e4d","Type":"ContainerStarted","Data":"330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6"} Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.212559 4693 patch_prober.go:28] interesting pod/downloads-7954f5f757-pwbtw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.212615 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-pwbtw" podUID="e779da98-f489-4eac-9633-857e35f9d68a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 
10.217.0.8:8080: connect: connection refused" Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.216994 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-gxwm2" podStartSLOduration=125.216967734 podStartE2EDuration="2m5.216967734s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:20.213780536 +0000 UTC m=+145.584745471" watchObservedRunningTime="2025-10-08 07:19:20.216967734 +0000 UTC m=+145.587932669" Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.226044 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.229221 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.238378 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.296479 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:20 crc kubenswrapper[4693]: E1008 07:19:20.298073 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:20.798031452 +0000 UTC m=+146.168996387 (durationBeforeRetry 500ms). 
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.309520 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" podStartSLOduration=124.309499979 podStartE2EDuration="2m4.309499979s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:20.272014674 +0000 UTC m=+145.642979609" watchObservedRunningTime="2025-10-08 07:19:20.309499979 +0000 UTC m=+145.680464914"
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.399080 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:20 crc kubenswrapper[4693]: E1008 07:19:20.399559 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:20.899542355 +0000 UTC m=+146.270507290 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.430183 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" podStartSLOduration=124.43015867 podStartE2EDuration="2m4.43015867s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:20.359995553 +0000 UTC m=+145.730960488" watchObservedRunningTime="2025-10-08 07:19:20.43015867 +0000 UTC m=+145.801123605"
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.500538 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:20 crc kubenswrapper[4693]: E1008 07:19:20.500972 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:21.000953765 +0000 UTC m=+146.371918700 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.603046 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:20 crc kubenswrapper[4693]: E1008 07:19:20.603505 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:21.103490386 +0000 UTC m=+146.474455321 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.641091 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 08 07:19:20 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld
Oct 08 07:19:20 crc kubenswrapper[4693]: [+]process-running ok
Oct 08 07:19:20 crc kubenswrapper[4693]: healthz check failed
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.641159 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.672140 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-x7lvf" podStartSLOduration=125.672118701 podStartE2EDuration="2m5.672118701s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:20.669254342 +0000 UTC m=+146.040219277" watchObservedRunningTime="2025-10-08 07:19:20.672118701 +0000 UTC m=+146.043083636"
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.708697 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:20 crc kubenswrapper[4693]: E1008 07:19:20.708939 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:21.208901997 +0000 UTC m=+146.579866932 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.709468 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:20 crc kubenswrapper[4693]: E1008 07:19:20.709946 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:21.209918285 +0000 UTC m=+146.580883220 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.733027 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-ss8kz"
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.745759 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" podStartSLOduration=124.745721493 podStartE2EDuration="2m4.745721493s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:20.744552271 +0000 UTC m=+146.115517206" watchObservedRunningTime="2025-10-08 07:19:20.745721493 +0000 UTC m=+146.116686428"
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.815455 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:20 crc kubenswrapper[4693]: E1008 07:19:20.815847 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:21.315827539 +0000 UTC m=+146.686792474 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:20 crc kubenswrapper[4693]: I1008 07:19:20.918903 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:20 crc kubenswrapper[4693]: E1008 07:19:20.919309 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:21.419289876 +0000 UTC m=+146.790254811 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.019730 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:21 crc kubenswrapper[4693]: E1008 07:19:21.020603 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:21.520587483 +0000 UTC m=+146.891552418 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.121890 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:21 crc kubenswrapper[4693]: E1008 07:19:21.122350 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:21.622331272 +0000 UTC m=+146.993296207 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.221116 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6" event={"ID":"0e338d6f-e361-4a3d-85cb-6c1f100270f2","Type":"ContainerStarted","Data":"8e94a597ec410b64649a810c5fed3d3937c0fa774492cfc073678b123db67430"}
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.221203 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6" event={"ID":"0e338d6f-e361-4a3d-85cb-6c1f100270f2","Type":"ContainerStarted","Data":"6fdc73e8377ce25c75507fbefbad7a67c1512211dc25afca23165753ae297bf3"}
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.221267 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6"
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.222709 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:21 crc kubenswrapper[4693]: E1008 07:19:21.223059 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:21.723032572 +0000 UTC m=+147.093997507 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.223235 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:21 crc kubenswrapper[4693]: E1008 07:19:21.223558 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:21.723544497 +0000 UTC m=+147.094509432 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.225004 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qkgxg" event={"ID":"2840730a-aa51-4168-84e9-9db9b8d136a1","Type":"ContainerStarted","Data":"fbd64e139d02df8d527a82ead8717f5890eb8390bcc3f35299ffb48d0e86ec0e"}
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.225034 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qkgxg" event={"ID":"2840730a-aa51-4168-84e9-9db9b8d136a1","Type":"ContainerStarted","Data":"33c0178af2a899758d49d6869b4fbf3a98c5832a6613a5f6c29aaba1631c87f1"}
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.235463 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" event={"ID":"3629c487-2222-4044-a9cd-934694064ec5","Type":"ContainerStarted","Data":"f9420d33041835d1b61eadd629c908459e6ffc3fb0739880c5c9d6e019b5e4ea"}
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.245243 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-787bm" event={"ID":"4fa30d38-cd25-470f-9195-b2cc226f3b7b","Type":"ContainerStarted","Data":"434f4e5ac2f7e646c31300083c357162866a9425c2fc75bc8be23125d0b5d567"}
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.265141 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t484z" event={"ID":"5a760e51-6b0f-431c-ba90-99416b3f215a","Type":"ContainerStarted","Data":"d23734b058457e19a25f6e8eaa6c35501c81e8fecb643f07d38490a42076a4ce"}
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.279017 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6" podStartSLOduration=125.278996728 podStartE2EDuration="2m5.278996728s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:21.276350805 +0000 UTC m=+146.647315740" watchObservedRunningTime="2025-10-08 07:19:21.278996728 +0000 UTC m=+146.649961663"
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.282117 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cs9f8" event={"ID":"cec7db18-2326-4d53-80e6-abce8210a82e","Type":"ContainerStarted","Data":"54ecd8bab81fa3051c27b108f06a7c766940329d6dc3143902bbaa7d9b89f4a4"}
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.300768 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" event={"ID":"926d3e66-e18c-4e73-830c-59e0d75071c7","Type":"ContainerStarted","Data":"01f7224943b9f782b224c1127c0971ee7d1e9e05953bd4d20ae51fa735d671ee"}
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.307286 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" event={"ID":"db2f35c9-5b39-4db5-9ec8-052cff6b8b8b","Type":"ContainerStarted","Data":"c3c6cfe4e5e2ea9a725c97f854a7fb5f3336a481978b1084d41a05d67b505b9b"}
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.307780 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8"
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.309179 4693 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-n2md8 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.27:5443/healthz\": dial tcp 10.217.0.27:5443: connect: connection refused" start-of-body=
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.309239 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" podUID="db2f35c9-5b39-4db5-9ec8-052cff6b8b8b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.27:5443/healthz\": dial tcp 10.217.0.27:5443: connect: connection refused"
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.316352 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sgmv4" event={"ID":"dafbb968-6b8e-4aea-94eb-aecfc938ecf0","Type":"ContainerStarted","Data":"ef33a37661eb7f633ee63b8fafc1a6388887334ac15cd54a879224a2b4f7332b"}
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.316414 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sgmv4" event={"ID":"dafbb968-6b8e-4aea-94eb-aecfc938ecf0","Type":"ContainerStarted","Data":"179a34866478a882ba6f1492d90bfd250302e37e2125f95b0b2e379935ea54e6"}
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.316529 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-sgmv4"
Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.318004 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" event={"ID":"a9d1eea1-3570-4937-a89b-5c6d87551b30","Type":"ContainerStarted","Data":"183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163"}
event={"ID":"a9d1eea1-3570-4937-a89b-5c6d87551b30","Type":"ContainerStarted","Data":"183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163"} Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.319700 4693 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-97z2l container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.319743 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" podUID="a9d1eea1-3570-4937-a89b-5c6d87551b30" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.325055 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:21 crc kubenswrapper[4693]: E1008 07:19:21.326948 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:21.826928671 +0000 UTC m=+147.197893606 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.349470 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-gxwm2" event={"ID":"c120941a-2403-4c12-8383-892fef07e53b","Type":"ContainerStarted","Data":"065ad9af9e3e1f14bc95ee4113db4004165f84f1116b2622aaf078e20ac863dc"} Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.363410 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" event={"ID":"045e2499-3e6f-4ea0-8036-ba25d897c4da","Type":"ContainerStarted","Data":"128cd73c137022c3f73a692754f06e5433217f4c599de5d00e541e729da6fee1"} Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.378925 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-qkgxg" podStartSLOduration=125.378895546 podStartE2EDuration="2m5.378895546s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:21.324415722 +0000 UTC m=+146.695380657" watchObservedRunningTime="2025-10-08 07:19:21.378895546 +0000 UTC m=+146.749860481" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.406978 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cdzt4" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.422289 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ppwh6" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.427472 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:21 crc kubenswrapper[4693]: E1008 07:19:21.440095 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:21.940077505 +0000 UTC m=+147.311042440 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.460606 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-787bm" podStartSLOduration=8.460581021 podStartE2EDuration="8.460581021s" podCreationTimestamp="2025-10-08 07:19:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:21.383357999 +0000 UTC m=+146.754322934" watchObservedRunningTime="2025-10-08 07:19:21.460581021 +0000 UTC m=+146.831545956" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.471748 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-9rsfp" podStartSLOduration=125.471719119 podStartE2EDuration="2m5.471719119s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:21.450282027 +0000 UTC m=+146.821246962" watchObservedRunningTime="2025-10-08 07:19:21.471719119 +0000 UTC m=+146.842684054" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.529766 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:21 crc kubenswrapper[4693]: E1008 07:19:21.530185 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.030164553 +0000 UTC m=+147.401129488 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.543552 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.544565 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.563768 4693 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-xstkp container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.17:8443/livez\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.563842 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" podUID="926d3e66-e18c-4e73-830c-59e0d75071c7" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.17:8443/livez\": dial tcp 10.217.0.17:8443: connect: connection refused" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.586154 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" podStartSLOduration=126.586135058 podStartE2EDuration="2m6.586135058s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:21.558704471 +0000 UTC m=+146.929669406" watchObservedRunningTime="2025-10-08 07:19:21.586135058 +0000 UTC m=+146.957099993" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.607157 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:21 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:21 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:21 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.607232 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.640591 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:21 crc kubenswrapper[4693]: E1008 07:19:21.640995 4693 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.140984153 +0000 UTC m=+147.511949088 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.686770 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" podStartSLOduration=125.686746516 podStartE2EDuration="2m5.686746516s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:21.639651256 +0000 UTC m=+147.010616191" watchObservedRunningTime="2025-10-08 07:19:21.686746516 +0000 UTC m=+147.057711451" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.688588 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" podStartSLOduration=125.688582007 podStartE2EDuration="2m5.688582007s" podCreationTimestamp="2025-10-08 07:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:21.685966765 +0000 UTC m=+147.056931700" watchObservedRunningTime="2025-10-08 07:19:21.688582007 +0000 UTC m=+147.059546942" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.746198 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:21 crc kubenswrapper[4693]: E1008 07:19:21.746712 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.246688971 +0000 UTC m=+147.617653906 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.764171 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-sgmv4" podStartSLOduration=8.764143893 podStartE2EDuration="8.764143893s" podCreationTimestamp="2025-10-08 07:19:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:21.763546467 +0000 UTC m=+147.134511402" watchObservedRunningTime="2025-10-08 07:19:21.764143893 +0000 UTC m=+147.135108828" Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.848408 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:21 crc kubenswrapper[4693]: E1008 07:19:21.848883 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.348869112 +0000 UTC m=+147.719834047 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:21 crc kubenswrapper[4693]: I1008 07:19:21.949253 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:21 crc kubenswrapper[4693]: E1008 07:19:21.949656 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.449635845 +0000 UTC m=+147.820600780 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.037666 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.038438 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.042320 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.045411 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.050889 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.051264 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.55124366 +0000 UTC m=+147.922208595 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.055460 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.152301 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.152555 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.652490156 +0000 UTC m=+148.023455101 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.152795 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.152913 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4de73ab3-cbbb-4383-9915-9bcc9d556054-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4de73ab3-cbbb-4383-9915-9bcc9d556054\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.153167 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4de73ab3-cbbb-4383-9915-9bcc9d556054-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4de73ab3-cbbb-4383-9915-9bcc9d556054\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.153226 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.653197245 +0000 UTC m=+148.024162180 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.253866 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.254099 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.75405944 +0000 UTC m=+148.125024375 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.254353 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.254413 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4de73ab3-cbbb-4383-9915-9bcc9d556054-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4de73ab3-cbbb-4383-9915-9bcc9d556054\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.254485 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4de73ab3-cbbb-4383-9915-9bcc9d556054-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4de73ab3-cbbb-4383-9915-9bcc9d556054\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.254517 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4de73ab3-cbbb-4383-9915-9bcc9d556054-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4de73ab3-cbbb-4383-9915-9bcc9d556054\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.254725 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.754716578 +0000 UTC m=+148.125681513 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.339466 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4de73ab3-cbbb-4383-9915-9bcc9d556054-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4de73ab3-cbbb-4383-9915-9bcc9d556054\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.352714 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.360498 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.360701 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.860665993 +0000 UTC m=+148.231630928 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.360751 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.361193 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.861181637 +0000 UTC m=+148.232146752 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.388543 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-227k8" event={"ID":"4f00a0d4-1d78-4ad4-808c-373fb017fe95","Type":"ContainerStarted","Data":"faf214f30c4edf8f57fb17e6c8e591f4391c41906686eb658b9e1147ccf16554"} Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.439040 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t484z" event={"ID":"5a760e51-6b0f-431c-ba90-99416b3f215a","Type":"ContainerStarted","Data":"e58d75a40dd7859f2b299953cb5c42f6bc9ae9dfe372fc1d39fc910e67e65da7"} Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.440435 4693 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-97z2l container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.440475 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" podUID="a9d1eea1-3570-4937-a89b-5c6d87551b30" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.462600 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.462986 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:22.962964017 +0000 UTC m=+148.333928952 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.504166 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-t484z" podStartSLOduration=127.504145404 podStartE2EDuration="2m7.504145404s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:22.498595731 +0000 UTC m=+147.869560686" watchObservedRunningTime="2025-10-08 07:19:22.504145404 +0000 UTC m=+147.875110339" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.564085 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.565522 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.065498758 +0000 UTC m=+148.436463693 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.603743 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:22 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:22 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:22 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.603854 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.665164 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.665718 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.165698705 +0000 UTC m=+148.536663640 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.767198 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.768050 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.268035681 +0000 UTC m=+148.639000616 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.869341 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.869893 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.369872452 +0000 UTC m=+148.740837387 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:22 crc kubenswrapper[4693]: I1008 07:19:22.972787 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:22 crc kubenswrapper[4693]: E1008 07:19:22.973859 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.473839303 +0000 UTC m=+148.844804228 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.020860 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.027762 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mtqsq" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.074500 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:23 crc kubenswrapper[4693]: E1008 07:19:23.074968 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.574946675 +0000 UTC m=+148.945911610 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.159586 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dd458"] Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.160697 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dd458" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.164470 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.176442 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dd458"] Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.177198 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:23 crc kubenswrapper[4693]: E1008 07:19:23.177676 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-08 07:19:23.67764728 +0000 UTC m=+149.048612215 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.254891 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-n2md8" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.277958 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.278218 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.278246 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.278282 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca170ba2-9128-4ba1-9084-c2373299a0d9-catalog-content\") pod \"community-operators-dd458\" (UID: \"ca170ba2-9128-4ba1-9084-c2373299a0d9\") " pod="openshift-marketplace/community-operators-dd458" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.278342 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.278372 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.278393 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg99b\" (UniqueName: 
\"kubernetes.io/projected/ca170ba2-9128-4ba1-9084-c2373299a0d9-kube-api-access-lg99b\") pod \"community-operators-dd458\" (UID: \"ca170ba2-9128-4ba1-9084-c2373299a0d9\") " pod="openshift-marketplace/community-operators-dd458" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.278413 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca170ba2-9128-4ba1-9084-c2373299a0d9-utilities\") pod \"community-operators-dd458\" (UID: \"ca170ba2-9128-4ba1-9084-c2373299a0d9\") " pod="openshift-marketplace/community-operators-dd458" Oct 08 07:19:23 crc kubenswrapper[4693]: E1008 07:19:23.278532 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.778517415 +0000 UTC m=+149.149482350 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.287993 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.288062 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.288966 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.294978 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.381134 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg99b\" (UniqueName: \"kubernetes.io/projected/ca170ba2-9128-4ba1-9084-c2373299a0d9-kube-api-access-lg99b\") pod \"community-operators-dd458\" (UID: \"ca170ba2-9128-4ba1-9084-c2373299a0d9\") " 
pod="openshift-marketplace/community-operators-dd458" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.381986 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca170ba2-9128-4ba1-9084-c2373299a0d9-utilities\") pod \"community-operators-dd458\" (UID: \"ca170ba2-9128-4ba1-9084-c2373299a0d9\") " pod="openshift-marketplace/community-operators-dd458" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.382122 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.382347 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca170ba2-9128-4ba1-9084-c2373299a0d9-catalog-content\") pod \"community-operators-dd458\" (UID: \"ca170ba2-9128-4ba1-9084-c2373299a0d9\") " pod="openshift-marketplace/community-operators-dd458" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.382654 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.383718 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca170ba2-9128-4ba1-9084-c2373299a0d9-utilities\") pod \"community-operators-dd458\" (UID: \"ca170ba2-9128-4ba1-9084-c2373299a0d9\") " pod="openshift-marketplace/community-operators-dd458" Oct 08 07:19:23 crc kubenswrapper[4693]: E1008 07:19:23.384138 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.884118131 +0000 UTC m=+149.255083066 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.385529 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca170ba2-9128-4ba1-9084-c2373299a0d9-catalog-content\") pod \"community-operators-dd458\" (UID: \"ca170ba2-9128-4ba1-9084-c2373299a0d9\") " pod="openshift-marketplace/community-operators-dd458" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.399249 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.410283 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7x6qs"] Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.411738 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7x6qs"] Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.411995 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7x6qs" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.434665 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.440778 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg99b\" (UniqueName: \"kubernetes.io/projected/ca170ba2-9128-4ba1-9084-c2373299a0d9-kube-api-access-lg99b\") pod \"community-operators-dd458\" (UID: \"ca170ba2-9128-4ba1-9084-c2373299a0d9\") " pod="openshift-marketplace/community-operators-dd458" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.481665 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.483554 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.483895 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-utilities\") pod \"certified-operators-7x6qs\" (UID: \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\") " pod="openshift-marketplace/certified-operators-7x6qs" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.483966 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwr64\" (UniqueName: \"kubernetes.io/projected/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-kube-api-access-jwr64\") pod \"certified-operators-7x6qs\" (UID: \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\") " pod="openshift-marketplace/certified-operators-7x6qs" Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.484000 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-catalog-content\") pod \"certified-operators-7x6qs\" (UID: \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\") " pod="openshift-marketplace/certified-operators-7x6qs" Oct 08 07:19:23 crc kubenswrapper[4693]: E1008 07:19:23.484240 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:23.984214595 +0000 UTC m=+149.355179530 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.488153 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dd458"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.499022 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.499078 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.502623 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-227k8" event={"ID":"4f00a0d4-1d78-4ad4-808c-373fb017fe95","Type":"ContainerStarted","Data":"0fed134dba8a6dcf374a5c61cdd7f11c7363c9b14a4d70446e959b9e976640fa"}
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.506353 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4de73ab3-cbbb-4383-9915-9bcc9d556054","Type":"ContainerStarted","Data":"a1b562337689441a04917c7227fc86de76c4e714614235820fd93cee6104b472"}
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.521698 4693 generic.go:334] "Generic (PLEG): container finished" podID="045e2499-3e6f-4ea0-8036-ba25d897c4da" containerID="128cd73c137022c3f73a692754f06e5433217f4c599de5d00e541e729da6fee1" exitCode=0
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.522858 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" event={"ID":"045e2499-3e6f-4ea0-8036-ba25d897c4da","Type":"ContainerDied","Data":"128cd73c137022c3f73a692754f06e5433217f4c599de5d00e541e729da6fee1"}
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.567891 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4tz77"]
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.569044 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.580263 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4tz77"]
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.591590 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwr64\" (UniqueName: \"kubernetes.io/projected/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-kube-api-access-jwr64\") pod \"certified-operators-7x6qs\" (UID: \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\") " pod="openshift-marketplace/certified-operators-7x6qs"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.592110 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-catalog-content\") pod \"certified-operators-7x6qs\" (UID: \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\") " pod="openshift-marketplace/certified-operators-7x6qs"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.592788 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.593137 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-utilities\") pod \"certified-operators-7x6qs\" (UID: \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\") " pod="openshift-marketplace/certified-operators-7x6qs"
Oct 08 07:19:23 crc kubenswrapper[4693]: E1008 07:19:23.596700 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:24.0966821 +0000 UTC m=+149.467647035 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.597693 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-catalog-content\") pod \"certified-operators-7x6qs\" (UID: \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\") " pod="openshift-marketplace/certified-operators-7x6qs"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.598362 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-utilities\") pod \"certified-operators-7x6qs\" (UID: \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\") " pod="openshift-marketplace/certified-operators-7x6qs"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.620087 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 08 07:19:23 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld
Oct 08 07:19:23 crc kubenswrapper[4693]: [+]process-running ok
Oct 08 07:19:23 crc kubenswrapper[4693]: healthz check failed
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.620677 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.634241 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwr64\" (UniqueName: \"kubernetes.io/projected/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-kube-api-access-jwr64\") pod \"certified-operators-7x6qs\" (UID: \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\") " pod="openshift-marketplace/certified-operators-7x6qs"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.700256 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.700870 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17d04184-b555-437c-b6bb-91a4e13263d8-utilities\") pod \"community-operators-4tz77\" (UID: \"17d04184-b555-437c-b6bb-91a4e13263d8\") " pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.700978 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb8ff\" (UniqueName: \"kubernetes.io/projected/17d04184-b555-437c-b6bb-91a4e13263d8-kube-api-access-hb8ff\") pod \"community-operators-4tz77\" (UID: \"17d04184-b555-437c-b6bb-91a4e13263d8\") " pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:19:23 crc kubenswrapper[4693]: E1008 07:19:23.701167 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:24.201117704 +0000 UTC m=+149.572082639 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.701462 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17d04184-b555-437c-b6bb-91a4e13263d8-catalog-content\") pod \"community-operators-4tz77\" (UID: \"17d04184-b555-437c-b6bb-91a4e13263d8\") " pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.739242 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sf677"]
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.741683 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.807607 4693 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.808992 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.809560 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17d04184-b555-437c-b6bb-91a4e13263d8-utilities\") pod \"community-operators-4tz77\" (UID: \"17d04184-b555-437c-b6bb-91a4e13263d8\") " pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.809695 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb8ff\" (UniqueName: \"kubernetes.io/projected/17d04184-b555-437c-b6bb-91a4e13263d8-kube-api-access-hb8ff\") pod \"community-operators-4tz77\" (UID: \"17d04184-b555-437c-b6bb-91a4e13263d8\") " pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.809796 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de56a156-ef96-4b18-bd73-d72965f7de18-catalog-content\") pod \"certified-operators-sf677\" (UID: \"de56a156-ef96-4b18-bd73-d72965f7de18\") " pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.809916 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17d04184-b555-437c-b6bb-91a4e13263d8-catalog-content\") pod \"community-operators-4tz77\" (UID: \"17d04184-b555-437c-b6bb-91a4e13263d8\") " pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.810008 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m22w8\" (UniqueName: \"kubernetes.io/projected/de56a156-ef96-4b18-bd73-d72965f7de18-kube-api-access-m22w8\") pod \"certified-operators-sf677\" (UID: \"de56a156-ef96-4b18-bd73-d72965f7de18\") " pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.810105 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de56a156-ef96-4b18-bd73-d72965f7de18-utilities\") pod \"certified-operators-sf677\" (UID: \"de56a156-ef96-4b18-bd73-d72965f7de18\") " pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.810350 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7x6qs"
Oct 08 07:19:23 crc kubenswrapper[4693]: E1008 07:19:23.811143 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:24.311126301 +0000 UTC m=+149.682091236 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.811755 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17d04184-b555-437c-b6bb-91a4e13263d8-utilities\") pod \"community-operators-4tz77\" (UID: \"17d04184-b555-437c-b6bb-91a4e13263d8\") " pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.812355 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17d04184-b555-437c-b6bb-91a4e13263d8-catalog-content\") pod \"community-operators-4tz77\" (UID: \"17d04184-b555-437c-b6bb-91a4e13263d8\") " pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.841156 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sf677"]
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.855675 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb8ff\" (UniqueName: \"kubernetes.io/projected/17d04184-b555-437c-b6bb-91a4e13263d8-kube-api-access-hb8ff\") pod \"community-operators-4tz77\" (UID: \"17d04184-b555-437c-b6bb-91a4e13263d8\") " pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.895205 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.911886 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.912194 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m22w8\" (UniqueName: \"kubernetes.io/projected/de56a156-ef96-4b18-bd73-d72965f7de18-kube-api-access-m22w8\") pod \"certified-operators-sf677\" (UID: \"de56a156-ef96-4b18-bd73-d72965f7de18\") " pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.912220 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de56a156-ef96-4b18-bd73-d72965f7de18-utilities\") pod \"certified-operators-sf677\" (UID: \"de56a156-ef96-4b18-bd73-d72965f7de18\") " pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.912284 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de56a156-ef96-4b18-bd73-d72965f7de18-catalog-content\") pod \"certified-operators-sf677\" (UID: \"de56a156-ef96-4b18-bd73-d72965f7de18\") " pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:19:23 crc kubenswrapper[4693]: E1008 07:19:23.913005 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:24.412974793 +0000 UTC m=+149.783939728 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.924212 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de56a156-ef96-4b18-bd73-d72965f7de18-catalog-content\") pod \"certified-operators-sf677\" (UID: \"de56a156-ef96-4b18-bd73-d72965f7de18\") " pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.924496 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de56a156-ef96-4b18-bd73-d72965f7de18-utilities\") pod \"certified-operators-sf677\" (UID: \"de56a156-ef96-4b18-bd73-d72965f7de18\") " pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:19:23 crc kubenswrapper[4693]: I1008 07:19:23.949271 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m22w8\" (UniqueName: \"kubernetes.io/projected/de56a156-ef96-4b18-bd73-d72965f7de18-kube-api-access-m22w8\") pod \"certified-operators-sf677\" (UID: \"de56a156-ef96-4b18-bd73-d72965f7de18\") " pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.018608 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:24 crc kubenswrapper[4693]: E1008 07:19:24.019185 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:24.519163355 +0000 UTC m=+149.890128290 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
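The interleaved failures above are a startup race: the volume manager needs a CSI client for kubevirt.io.hostpath-provisioner, but the node plugin has not yet registered with this kubelet, so both the MountDevice path (for image-registry-697d97f7c8-m4zw6) and the TearDown path (for the departed pod 8f668bae) fail the driver lookup, and nestedpendingoperations embargoes each operation for the durationBeforeRetry of 500ms printed in every error. A minimal sketch of that retry shape, assuming only the Go standard library; mountOnce and the 1.2s registration delay are illustrative stand-ins, not kubelet code:

package main

import (
	"errors"
	"fmt"
	"time"
)

// errDriverNotRegistered stands in for the "driver name ... not found in the
// list of registered CSI drivers" failure seen in the log (illustrative only).
var errDriverNotRegistered = errors.New("driver not found in the list of registered CSI drivers")

// mountOnce is a hypothetical stand-in for a single MountDevice attempt.
func mountOnce(registered func() bool) error {
	if !registered() {
		return errDriverNotRegistered
	}
	return nil
}

func main() {
	start := time.Now()
	// Pretend the plugin finishes registering ~1.2s after the first attempt.
	registered := func() bool { return time.Since(start) > 1200*time.Millisecond }

	const durationBeforeRetry = 500 * time.Millisecond // matches the log's embargo
	for attempt := 1; ; attempt++ {
		if err := mountOnce(registered); err == nil {
			fmt.Printf("attempt %d: MountDevice succeeded after %v\n",
				attempt, time.Since(start).Round(time.Millisecond))
			return
		} else {
			fmt.Printf("attempt %d: %v; no retries permitted for %v\n",
				attempt, err, durationBeforeRetry)
		}
		time.Sleep(durationBeforeRetry)
	}
}

In the log every embargo stays at 500ms rather than growing; once the driver registers (below), the same operations succeed on the next pass.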
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.084226 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.120164 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:24 crc kubenswrapper[4693]: E1008 07:19:24.120776 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:24.620759411 +0000 UTC m=+149.991724346 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.227223 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:24 crc kubenswrapper[4693]: E1008 07:19:24.227859 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:24.727843717 +0000 UTC m=+150.098808652 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.328571 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:24 crc kubenswrapper[4693]: E1008 07:19:24.328873 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:24.828833366 +0000 UTC m=+150.199798301 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.328958 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:24 crc kubenswrapper[4693]: E1008 07:19:24.329550 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:24.829543225 +0000 UTC m=+150.200508150 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.431914 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:24 crc kubenswrapper[4693]: E1008 07:19:24.432091 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-08 07:19:24.932043275 +0000 UTC m=+150.303008210 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.432191 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:24 crc kubenswrapper[4693]: E1008 07:19:24.432628 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-08 07:19:24.932610721 +0000 UTC m=+150.303575656 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m4zw6" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.510061 4693 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-08T07:19:23.807872761Z","Handler":null,"Name":""}
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.516307 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dd458"]
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.523184 4693 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.523223 4693 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.535300 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.546930 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"344ff87bc642b05e3d8c21af53ba8dad012f58f21082ba099f045f56ac542e6f"}
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.551209 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-227k8" event={"ID":"4f00a0d4-1d78-4ad4-808c-373fb017fe95","Type":"ContainerStarted","Data":"a33b0bbbaed8483f5737dae7fa868119993b771293bad70bb49d27ee3e1a7a69"}
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.552039 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"5f0954f2752a19141363ae755e7923cb622a8f81e7dedbaa23ddb6d427f10740"}
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.553266 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4de73ab3-cbbb-4383-9915-9bcc9d556054","Type":"ContainerStarted","Data":"7c6ef2d695ea7464acfb0b809d5a1aaa0b2e63c908836b6c997b2ba35f792ef8"}
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.561765 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.565577 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"66b921fea62edb56dd4f74e7cdfcdb23a549e713ebd73dc97c9d5edd8c4a1eb8"}
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.599208 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7x6qs"]
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.621045 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 08 07:19:24 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld
Oct 08 07:19:24 crc kubenswrapper[4693]: [+]process-running ok
Oct 08 07:19:24 crc kubenswrapper[4693]: healthz check failed
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.621108 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.638282 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.654084 4693 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.654140 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.767465 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4tz77"]
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.815774 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m4zw6\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") " pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.828307 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sf677"]
Oct 08 07:19:24 crc kubenswrapper[4693]: I1008 07:19:24.967522 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.023201 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.149431 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzc7x\" (UniqueName: \"kubernetes.io/projected/045e2499-3e6f-4ea0-8036-ba25d897c4da-kube-api-access-pzc7x\") pod \"045e2499-3e6f-4ea0-8036-ba25d897c4da\" (UID: \"045e2499-3e6f-4ea0-8036-ba25d897c4da\") "
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.149554 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/045e2499-3e6f-4ea0-8036-ba25d897c4da-secret-volume\") pod \"045e2499-3e6f-4ea0-8036-ba25d897c4da\" (UID: \"045e2499-3e6f-4ea0-8036-ba25d897c4da\") "
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.149592 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/045e2499-3e6f-4ea0-8036-ba25d897c4da-config-volume\") pod \"045e2499-3e6f-4ea0-8036-ba25d897c4da\" (UID: \"045e2499-3e6f-4ea0-8036-ba25d897c4da\") "
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.150588 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/045e2499-3e6f-4ea0-8036-ba25d897c4da-config-volume" (OuterVolumeSpecName: "config-volume") pod "045e2499-3e6f-4ea0-8036-ba25d897c4da" (UID: "045e2499-3e6f-4ea0-8036-ba25d897c4da"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.158598 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/045e2499-3e6f-4ea0-8036-ba25d897c4da-kube-api-access-pzc7x" (OuterVolumeSpecName: "kube-api-access-pzc7x") pod "045e2499-3e6f-4ea0-8036-ba25d897c4da" (UID: "045e2499-3e6f-4ea0-8036-ba25d897c4da"). InnerVolumeSpecName "kube-api-access-pzc7x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.159546 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/045e2499-3e6f-4ea0-8036-ba25d897c4da-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "045e2499-3e6f-4ea0-8036-ba25d897c4da" (UID: "045e2499-3e6f-4ea0-8036-ba25d897c4da"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.220155 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m4zw6"]
Oct 08 07:19:25 crc kubenswrapper[4693]: W1008 07:19:25.227878 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode947463f_afe0_40a7_8f9f_b5d76d2086d0.slice/crio-04f4dbbe536199ba80e2a6febf093475698cfc850ae2b7080bb68e91e84f61f2 WatchSource:0}: Error finding container 04f4dbbe536199ba80e2a6febf093475698cfc850ae2b7080bb68e91e84f61f2: Status 404 returned error can't find the container with id 04f4dbbe536199ba80e2a6febf093475698cfc850ae2b7080bb68e91e84f61f2
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.251767 4693 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/045e2499-3e6f-4ea0-8036-ba25d897c4da-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.251808 4693 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/045e2499-3e6f-4ea0-8036-ba25d897c4da-config-volume\") on node \"crc\" DevicePath \"\""
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.251838 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzc7x\" (UniqueName: \"kubernetes.io/projected/045e2499-3e6f-4ea0-8036-ba25d897c4da-kube-api-access-pzc7x\") on node \"crc\" DevicePath \"\""
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.325863 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rxdhl"]
Oct 08 07:19:25 crc kubenswrapper[4693]: E1008 07:19:25.326389 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="045e2499-3e6f-4ea0-8036-ba25d897c4da" containerName="collect-profiles"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.326478 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="045e2499-3e6f-4ea0-8036-ba25d897c4da" containerName="collect-profiles"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.326674 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="045e2499-3e6f-4ea0-8036-ba25d897c4da" containerName="collect-profiles"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.327626 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.331234 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.339059 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rxdhl"]
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.388263 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.461985 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-catalog-content\") pod \"redhat-marketplace-rxdhl\" (UID: \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\") " pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.462570 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-utilities\") pod \"redhat-marketplace-rxdhl\" (UID: \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\") " pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.462742 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x985n\" (UniqueName: \"kubernetes.io/projected/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-kube-api-access-x985n\") pod \"redhat-marketplace-rxdhl\" (UID: \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\") " pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.563980 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-utilities\") pod \"redhat-marketplace-rxdhl\" (UID: \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\") " pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.564031 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x985n\" (UniqueName: \"kubernetes.io/projected/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-kube-api-access-x985n\") pod \"redhat-marketplace-rxdhl\" (UID: \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\") " pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.564103 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-catalog-content\") pod \"redhat-marketplace-rxdhl\" (UID: \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\") " pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.564615 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-catalog-content\") pod \"redhat-marketplace-rxdhl\" (UID: \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\") " pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.564957 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-utilities\") pod \"redhat-marketplace-rxdhl\" (UID: \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\") " pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.578184 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b45a91712a4cd881b982ad9e7891666114ddd43ddeca69a838d70dc17d27e84f"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.582318 4693 generic.go:334] "Generic (PLEG): container finished" podID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" containerID="e4ac3330065a975da60f91aa31da25989fcc03bc61e34f33badb519e887d5e4e" exitCode=0
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.582417 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7x6qs" event={"ID":"b4f7b0f9-4902-4768-94ae-0c98e7814f9f","Type":"ContainerDied","Data":"e4ac3330065a975da60f91aa31da25989fcc03bc61e34f33badb519e887d5e4e"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.582448 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7x6qs" event={"ID":"b4f7b0f9-4902-4768-94ae-0c98e7814f9f","Type":"ContainerStarted","Data":"06d3b0407f968505e38d1e9020e3ac0f39e35dd34baa285ccdc38497a8da4b26"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.584215 4693 generic.go:334] "Generic (PLEG): container finished" podID="4de73ab3-cbbb-4383-9915-9bcc9d556054" containerID="7c6ef2d695ea7464acfb0b809d5a1aaa0b2e63c908836b6c997b2ba35f792ef8" exitCode=0
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.584257 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4de73ab3-cbbb-4383-9915-9bcc9d556054","Type":"ContainerDied","Data":"7c6ef2d695ea7464acfb0b809d5a1aaa0b2e63c908836b6c997b2ba35f792ef8"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.584832 4693 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.588978 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4" event={"ID":"045e2499-3e6f-4ea0-8036-ba25d897c4da","Type":"ContainerDied","Data":"121ae298b134c8179e4e33806b6f1c67322b5c6adfddd101b60bc8897fdebf21"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.589026 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="121ae298b134c8179e4e33806b6f1c67322b5c6adfddd101b60bc8897fdebf21"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.589126 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.592104 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" event={"ID":"e947463f-afe0-40a7-8f9f-b5d76d2086d0","Type":"ContainerStarted","Data":"2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.592140 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" event={"ID":"e947463f-afe0-40a7-8f9f-b5d76d2086d0","Type":"ContainerStarted","Data":"04f4dbbe536199ba80e2a6febf093475698cfc850ae2b7080bb68e91e84f61f2"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.592673 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.597604 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"9f9c9d5b3bfbc33bbdf53cc32c250c7bdaa2343a602bac6e427b880402f4f06e"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.602687 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 08 07:19:25 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]process-running ok
Oct 08 07:19:25 crc kubenswrapper[4693]: healthz check failed
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.602760 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.617548 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x985n\" (UniqueName: \"kubernetes.io/projected/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-kube-api-access-x985n\") pod \"redhat-marketplace-rxdhl\" (UID: \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\") " pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.618398 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"75724f01d86571003ee94dc42f14a418cdc7b68cdd18735468d016a4fd4cea72"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.618849 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.625615 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" podStartSLOduration=130.625596841 podStartE2EDuration="2m10.625596841s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:25.625306573 +0000 UTC m=+150.996271508" watchObservedRunningTime="2025-10-08 07:19:25.625596841 +0000 UTC m=+150.996561776"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.635126 4693 generic.go:334] "Generic (PLEG): container finished" podID="de56a156-ef96-4b18-bd73-d72965f7de18" containerID="0023709637a207d88d3649bcbb3cb0634bfe92e873af8164e0a83211e9e1f7ee" exitCode=0
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.635333 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf677" event={"ID":"de56a156-ef96-4b18-bd73-d72965f7de18","Type":"ContainerDied","Data":"0023709637a207d88d3649bcbb3cb0634bfe92e873af8164e0a83211e9e1f7ee"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.635422 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf677" event={"ID":"de56a156-ef96-4b18-bd73-d72965f7de18","Type":"ContainerStarted","Data":"aa3db492c97e65561096befd99f1bc3cb7c7f5ef7abb71ca0a2d0485bf4c99a1"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.650365 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-227k8" event={"ID":"4f00a0d4-1d78-4ad4-808c-373fb017fe95","Type":"ContainerStarted","Data":"99d9b126f119fddd2064866dbdee39ff485828a59592f23f9d96c523554a9fec"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.653383 4693 generic.go:334] "Generic (PLEG): container finished" podID="17d04184-b555-437c-b6bb-91a4e13263d8" containerID="21f86095c1730e57a02dec6af25d5b9e2c0a71d70e7c9d7f4798531b1349813f" exitCode=0
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.653645 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tz77" event={"ID":"17d04184-b555-437c-b6bb-91a4e13263d8","Type":"ContainerDied","Data":"21f86095c1730e57a02dec6af25d5b9e2c0a71d70e7c9d7f4798531b1349813f"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.653700 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tz77" event={"ID":"17d04184-b555-437c-b6bb-91a4e13263d8","Type":"ContainerStarted","Data":"c390e642beeb5048050daf9dbc488c83577d112c3356cc9de27452a0e8279dd3"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.665012 4693 generic.go:334] "Generic (PLEG): container finished" podID="ca170ba2-9128-4ba1-9084-c2373299a0d9" containerID="f81ecfd9e0d88fdbf50b98fa8e52b4ea9b5f819701f34d24173d11f2a5852831" exitCode=0
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.665069 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dd458" event={"ID":"ca170ba2-9128-4ba1-9084-c2373299a0d9","Type":"ContainerDied","Data":"f81ecfd9e0d88fdbf50b98fa8e52b4ea9b5f819701f34d24173d11f2a5852831"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.666358 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dd458" event={"ID":"ca170ba2-9128-4ba1-9084-c2373299a0d9","Type":"ContainerStarted","Data":"e474d29fbde1da5a97f6f25309a7158665a844e18b175f40504bb6ca64d2f478"}
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.687667 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.716798 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-227k8" podStartSLOduration=12.716781058 podStartE2EDuration="12.716781058s" podCreationTimestamp="2025-10-08 07:19:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:25.689583467 +0000 UTC m=+151.060548402" watchObservedRunningTime="2025-10-08 07:19:25.716781058 +0000 UTC m=+151.087745993"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.740285 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gxtd2"]
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.741400 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gxtd2"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.757730 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gxtd2"]
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.874138 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31f9f4e1-d825-4a21-883a-c670744048a7-catalog-content\") pod \"redhat-marketplace-gxtd2\" (UID: \"31f9f4e1-d825-4a21-883a-c670744048a7\") " pod="openshift-marketplace/redhat-marketplace-gxtd2"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.874563 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vntk6\" (UniqueName: \"kubernetes.io/projected/31f9f4e1-d825-4a21-883a-c670744048a7-kube-api-access-vntk6\") pod \"redhat-marketplace-gxtd2\" (UID: \"31f9f4e1-d825-4a21-883a-c670744048a7\") " pod="openshift-marketplace/redhat-marketplace-gxtd2"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.874616 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31f9f4e1-d825-4a21-883a-c670744048a7-utilities\") pod \"redhat-marketplace-gxtd2\" (UID: \"31f9f4e1-d825-4a21-883a-c670744048a7\") " pod="openshift-marketplace/redhat-marketplace-gxtd2"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.885387 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.885437 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-t484z"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.900227 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.903157 4693 patch_prober.go:28] interesting pod/apiserver-76f77b778f-t484z container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]log ok
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]etcd ok
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]poststarthook/start-apiserver-admission-initializer ok
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]poststarthook/generic-apiserver-start-informers ok
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]poststarthook/max-in-flight-filter ok
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]poststarthook/storage-object-count-tracker-hook ok
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Oct 08 07:19:25 crc kubenswrapper[4693]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Oct 08 07:19:25 crc kubenswrapper[4693]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]poststarthook/project.openshift.io-projectcache ok
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]poststarthook/openshift.io-startinformers ok
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]poststarthook/openshift.io-restmapperupdater ok
Oct 08 07:19:25 crc kubenswrapper[4693]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Oct 08 07:19:25 crc kubenswrapper[4693]: livez check failed
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.903226 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-t484z" podUID="5a760e51-6b0f-431c-ba90-99416b3f215a" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.975311 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4de73ab3-cbbb-4383-9915-9bcc9d556054-kube-api-access\") pod \"4de73ab3-cbbb-4383-9915-9bcc9d556054\" (UID: \"4de73ab3-cbbb-4383-9915-9bcc9d556054\") "
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.975455 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4de73ab3-cbbb-4383-9915-9bcc9d556054-kubelet-dir\") pod \"4de73ab3-cbbb-4383-9915-9bcc9d556054\" (UID: \"4de73ab3-cbbb-4383-9915-9bcc9d556054\") "
Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.975697 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vntk6\" (UniqueName: \"kubernetes.io/projected/31f9f4e1-d825-4a21-883a-c670744048a7-kube-api-access-vntk6\") pod \"redhat-marketplace-gxtd2\" (UID: \"31f9f4e1-d825-4a21-883a-c670744048a7\") " pod="openshift-marketplace/redhat-marketplace-gxtd2"
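The [+]/[-] lines above are the kubelet echoing the start of the failed healthz body: each line is one named check, failing checks report only "reason withheld" (detailed reasons are withheld from unauthenticated probes), and any failing check turns the whole endpoint into an HTTP 500, which the prober then records as a Startup probe failure. A stdlib-only sketch of the client side of such a probe, reusing the machine-config-daemon URL seen earlier in the log; probeOnce is illustrative, not kubelet's actual prober:

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// probeOnce mirrors the shape of an HTTP liveness/startup probe: GET with a
// short timeout, treat any status outside 200-399 as failure, and keep the
// start of the body for the log line (a sketch under those assumptions).
func probeOnce(url string) (healthy bool, startOfBody string, err error) {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		// e.g. "dial tcp 127.0.0.1:8798: connect: connection refused" in the log
		return false, "", err
	}
	defer resp.Body.Close()
	b, _ := io.ReadAll(io.LimitReader(resp.Body, 10*1024))
	return resp.StatusCode >= 200 && resp.StatusCode < 400, string(b), nil
}

func main() {
	ok, body, err := probeOnce("http://127.0.0.1:8798/health")
	if err != nil {
		fmt.Println("probe error:", err)
		return
	}
	fmt.Printf("healthy=%v start-of-body=%q\n", ok, body)
}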
pod="openshift-marketplace/redhat-marketplace-gxtd2" Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.975799 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31f9f4e1-d825-4a21-883a-c670744048a7-catalog-content\") pod \"redhat-marketplace-gxtd2\" (UID: \"31f9f4e1-d825-4a21-883a-c670744048a7\") " pod="openshift-marketplace/redhat-marketplace-gxtd2" Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.977161 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de73ab3-cbbb-4383-9915-9bcc9d556054-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4de73ab3-cbbb-4383-9915-9bcc9d556054" (UID: "4de73ab3-cbbb-4383-9915-9bcc9d556054"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.977448 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31f9f4e1-d825-4a21-883a-c670744048a7-utilities\") pod \"redhat-marketplace-gxtd2\" (UID: \"31f9f4e1-d825-4a21-883a-c670744048a7\") " pod="openshift-marketplace/redhat-marketplace-gxtd2" Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.978298 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31f9f4e1-d825-4a21-883a-c670744048a7-catalog-content\") pod \"redhat-marketplace-gxtd2\" (UID: \"31f9f4e1-d825-4a21-883a-c670744048a7\") " pod="openshift-marketplace/redhat-marketplace-gxtd2" Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.981222 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4de73ab3-cbbb-4383-9915-9bcc9d556054-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4de73ab3-cbbb-4383-9915-9bcc9d556054" (UID: "4de73ab3-cbbb-4383-9915-9bcc9d556054"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.994957 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vntk6\" (UniqueName: \"kubernetes.io/projected/31f9f4e1-d825-4a21-883a-c670744048a7-kube-api-access-vntk6\") pod \"redhat-marketplace-gxtd2\" (UID: \"31f9f4e1-d825-4a21-883a-c670744048a7\") " pod="openshift-marketplace/redhat-marketplace-gxtd2" Oct 08 07:19:25 crc kubenswrapper[4693]: I1008 07:19:25.997671 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rxdhl"] Oct 08 07:19:26 crc kubenswrapper[4693]: W1008 07:19:26.013735 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb509fff2_7ab7_4e9a_9057_5fda5fa66f41.slice/crio-7693c3b5f06ee372798c127c98d6624e4c3462981a3a2303d466b486d9b74c04 WatchSource:0}: Error finding container 7693c3b5f06ee372798c127c98d6624e4c3462981a3a2303d466b486d9b74c04: Status 404 returned error can't find the container with id 7693c3b5f06ee372798c127c98d6624e4c3462981a3a2303d466b486d9b74c04 Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.022206 4693 patch_prober.go:28] interesting pod/downloads-7954f5f757-pwbtw container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.022517 4693 patch_prober.go:28] interesting pod/downloads-7954f5f757-pwbtw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.022565 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-pwbtw" podUID="e779da98-f489-4eac-9633-857e35f9d68a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.022794 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-pwbtw" podUID="e779da98-f489-4eac-9633-857e35f9d68a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.077704 4693 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4de73ab3-cbbb-4383-9915-9bcc9d556054-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.077744 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4de73ab3-cbbb-4383-9915-9bcc9d556054-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.102535 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gxtd2" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.323688 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2q242"] Oct 08 07:19:26 crc kubenswrapper[4693]: E1008 07:19:26.329926 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de73ab3-cbbb-4383-9915-9bcc9d556054" containerName="pruner" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.329961 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de73ab3-cbbb-4383-9915-9bcc9d556054" containerName="pruner" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.330157 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de73ab3-cbbb-4383-9915-9bcc9d556054" containerName="pruner" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.331253 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.333478 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.343677 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2q242"] Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.389878 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gxtd2"] Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.399476 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-catalog-content\") pod \"redhat-operators-2q242\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.399594 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-utilities\") pod \"redhat-operators-2q242\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.399702 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhbdv\" (UniqueName: \"kubernetes.io/projected/3163591b-81ce-4897-8170-961affb60344-kube-api-access-xhbdv\") pod \"redhat-operators-2q242\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.502514 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-catalog-content\") pod \"redhat-operators-2q242\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.503023 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-utilities\") pod \"redhat-operators-2q242\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:19:26 
crc kubenswrapper[4693]: I1008 07:19:26.503369 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhbdv\" (UniqueName: \"kubernetes.io/projected/3163591b-81ce-4897-8170-961affb60344-kube-api-access-xhbdv\") pod \"redhat-operators-2q242\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.503402 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-catalog-content\") pod \"redhat-operators-2q242\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.504172 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-utilities\") pod \"redhat-operators-2q242\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.527845 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhbdv\" (UniqueName: \"kubernetes.io/projected/3163591b-81ce-4897-8170-961affb60344-kube-api-access-xhbdv\") pod \"redhat-operators-2q242\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.552296 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.558936 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xstkp" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.598422 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.613450 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:26 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:26 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:26 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.613519 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.693951 4693 generic.go:334] "Generic (PLEG): container finished" podID="b509fff2-7ab7-4e9a-9057-5fda5fa66f41" containerID="e88f77d651a1a7466a0377f46cd318ff2b39a605f3cf6168453561f3c3e35af3" exitCode=0 Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.694069 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxdhl" event={"ID":"b509fff2-7ab7-4e9a-9057-5fda5fa66f41","Type":"ContainerDied","Data":"e88f77d651a1a7466a0377f46cd318ff2b39a605f3cf6168453561f3c3e35af3"} Oct 08 07:19:26 crc 
kubenswrapper[4693]: I1008 07:19:26.694114 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxdhl" event={"ID":"b509fff2-7ab7-4e9a-9057-5fda5fa66f41","Type":"ContainerStarted","Data":"7693c3b5f06ee372798c127c98d6624e4c3462981a3a2303d466b486d9b74c04"} Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.700846 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxtd2" event={"ID":"31f9f4e1-d825-4a21-883a-c670744048a7","Type":"ContainerStarted","Data":"5d01e65d4f39165d8a03c8497db464e3892824ca7733a782d567ba584108e43c"} Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.700898 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxtd2" event={"ID":"31f9f4e1-d825-4a21-883a-c670744048a7","Type":"ContainerStarted","Data":"0dbecbdf699404d6fae2f2fa7c2c01365aae7ed94eaf5a33e38c4dcd26acf278"} Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.704514 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.705414 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4de73ab3-cbbb-4383-9915-9bcc9d556054","Type":"ContainerDied","Data":"a1b562337689441a04917c7227fc86de76c4e714614235820fd93cee6104b472"} Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.705471 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1b562337689441a04917c7227fc86de76c4e714614235820fd93cee6104b472" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.706128 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.733121 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jb29z"] Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.739382 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jb29z" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.810907 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e88d2996-c98f-4cce-b986-21611b1de03a-catalog-content\") pod \"redhat-operators-jb29z\" (UID: \"e88d2996-c98f-4cce-b986-21611b1de03a\") " pod="openshift-marketplace/redhat-operators-jb29z" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.810973 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgt7h\" (UniqueName: \"kubernetes.io/projected/e88d2996-c98f-4cce-b986-21611b1de03a-kube-api-access-wgt7h\") pod \"redhat-operators-jb29z\" (UID: \"e88d2996-c98f-4cce-b986-21611b1de03a\") " pod="openshift-marketplace/redhat-operators-jb29z" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.811040 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e88d2996-c98f-4cce-b986-21611b1de03a-utilities\") pod \"redhat-operators-jb29z\" (UID: \"e88d2996-c98f-4cce-b986-21611b1de03a\") " pod="openshift-marketplace/redhat-operators-jb29z" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.855941 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jb29z"] Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.856853 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.856879 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.860078 4693 patch_prober.go:28] interesting pod/console-f9d7485db-x7lvf container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.860122 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-x7lvf" podUID="c90da226-e34a-4d72-a64d-132a45439e4d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.912561 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e88d2996-c98f-4cce-b986-21611b1de03a-catalog-content\") pod \"redhat-operators-jb29z\" (UID: \"e88d2996-c98f-4cce-b986-21611b1de03a\") " pod="openshift-marketplace/redhat-operators-jb29z" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.912604 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgt7h\" (UniqueName: \"kubernetes.io/projected/e88d2996-c98f-4cce-b986-21611b1de03a-kube-api-access-wgt7h\") pod \"redhat-operators-jb29z\" (UID: \"e88d2996-c98f-4cce-b986-21611b1de03a\") " pod="openshift-marketplace/redhat-operators-jb29z" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.912629 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/e88d2996-c98f-4cce-b986-21611b1de03a-utilities\") pod \"redhat-operators-jb29z\" (UID: \"e88d2996-c98f-4cce-b986-21611b1de03a\") " pod="openshift-marketplace/redhat-operators-jb29z" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.924124 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e88d2996-c98f-4cce-b986-21611b1de03a-utilities\") pod \"redhat-operators-jb29z\" (UID: \"e88d2996-c98f-4cce-b986-21611b1de03a\") " pod="openshift-marketplace/redhat-operators-jb29z" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.924841 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e88d2996-c98f-4cce-b986-21611b1de03a-catalog-content\") pod \"redhat-operators-jb29z\" (UID: \"e88d2996-c98f-4cce-b986-21611b1de03a\") " pod="openshift-marketplace/redhat-operators-jb29z" Oct 08 07:19:26 crc kubenswrapper[4693]: I1008 07:19:26.955883 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgt7h\" (UniqueName: \"kubernetes.io/projected/e88d2996-c98f-4cce-b986-21611b1de03a-kube-api-access-wgt7h\") pod \"redhat-operators-jb29z\" (UID: \"e88d2996-c98f-4cce-b986-21611b1de03a\") " pod="openshift-marketplace/redhat-operators-jb29z" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.035593 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.116114 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jb29z" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.120032 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2q242"] Oct 08 07:19:27 crc kubenswrapper[4693]: W1008 07:19:27.132552 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3163591b_81ce_4897_8170_961affb60344.slice/crio-28d2af4bf8471e3bb55410dfdf16272e4251ef8d8755118552a776ea9826af83 WatchSource:0}: Error finding container 28d2af4bf8471e3bb55410dfdf16272e4251ef8d8755118552a776ea9826af83: Status 404 returned error can't find the container with id 28d2af4bf8471e3bb55410dfdf16272e4251ef8d8755118552a776ea9826af83 Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.377139 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jb29z"] Oct 08 07:19:27 crc kubenswrapper[4693]: W1008 07:19:27.417163 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode88d2996_c98f_4cce_b986_21611b1de03a.slice/crio-bf775062815544824fbd6b8af4fc1c183110d270d9636af0a7f64a89b432e0c5 WatchSource:0}: Error finding container bf775062815544824fbd6b8af4fc1c183110d270d9636af0a7f64a89b432e0c5: Status 404 returned error can't find the container with id bf775062815544824fbd6b8af4fc1c183110d270d9636af0a7f64a89b432e0c5 Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.484486 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.487024 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.490776 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.496258 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.498179 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.530755 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a08a011-72cb-473d-b8c8-3357b34e680f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2a08a011-72cb-473d-b8c8-3357b34e680f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.530803 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a08a011-72cb-473d-b8c8-3357b34e680f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2a08a011-72cb-473d-b8c8-3357b34e680f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.601566 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:27 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:27 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:27 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.601625 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.639706 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a08a011-72cb-473d-b8c8-3357b34e680f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2a08a011-72cb-473d-b8c8-3357b34e680f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.639779 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a08a011-72cb-473d-b8c8-3357b34e680f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2a08a011-72cb-473d-b8c8-3357b34e680f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.639902 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a08a011-72cb-473d-b8c8-3357b34e680f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2a08a011-72cb-473d-b8c8-3357b34e680f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.660453 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a08a011-72cb-473d-b8c8-3357b34e680f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2a08a011-72cb-473d-b8c8-3357b34e680f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.723170 4693 generic.go:334] "Generic (PLEG): container finished" podID="3163591b-81ce-4897-8170-961affb60344" containerID="ef9f6f50ddd8475cf82837e128df3fd52a8bb5cc96cb66272dc4e9793657329b" exitCode=0 Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.723246 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q242" event={"ID":"3163591b-81ce-4897-8170-961affb60344","Type":"ContainerDied","Data":"ef9f6f50ddd8475cf82837e128df3fd52a8bb5cc96cb66272dc4e9793657329b"} Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.723281 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q242" event={"ID":"3163591b-81ce-4897-8170-961affb60344","Type":"ContainerStarted","Data":"28d2af4bf8471e3bb55410dfdf16272e4251ef8d8755118552a776ea9826af83"} Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.727599 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb29z" event={"ID":"e88d2996-c98f-4cce-b986-21611b1de03a","Type":"ContainerStarted","Data":"bf775062815544824fbd6b8af4fc1c183110d270d9636af0a7f64a89b432e0c5"} Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.731426 4693 generic.go:334] "Generic (PLEG): container finished" podID="31f9f4e1-d825-4a21-883a-c670744048a7" containerID="5d01e65d4f39165d8a03c8497db464e3892824ca7733a782d567ba584108e43c" exitCode=0 Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.731494 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxtd2" event={"ID":"31f9f4e1-d825-4a21-883a-c670744048a7","Type":"ContainerDied","Data":"5d01e65d4f39165d8a03c8497db464e3892824ca7733a782d567ba584108e43c"} Oct 08 07:19:27 crc kubenswrapper[4693]: I1008 07:19:27.806377 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 08 07:19:28 crc kubenswrapper[4693]: I1008 07:19:28.270747 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 08 07:19:28 crc kubenswrapper[4693]: W1008 07:19:28.284620 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod2a08a011_72cb_473d_b8c8_3357b34e680f.slice/crio-02abfc064b62b987eef3135e2ec90087be9ae5663cc2dd0bd45689f0610eb716 WatchSource:0}: Error finding container 02abfc064b62b987eef3135e2ec90087be9ae5663cc2dd0bd45689f0610eb716: Status 404 returned error can't find the container with id 02abfc064b62b987eef3135e2ec90087be9ae5663cc2dd0bd45689f0610eb716 Oct 08 07:19:28 crc kubenswrapper[4693]: I1008 07:19:28.604290 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:28 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:28 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:28 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:28 crc kubenswrapper[4693]: I1008 07:19:28.604391 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:28 crc kubenswrapper[4693]: I1008 07:19:28.746312 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2a08a011-72cb-473d-b8c8-3357b34e680f","Type":"ContainerStarted","Data":"02abfc064b62b987eef3135e2ec90087be9ae5663cc2dd0bd45689f0610eb716"} Oct 08 07:19:28 crc kubenswrapper[4693]: I1008 07:19:28.753129 4693 generic.go:334] "Generic (PLEG): container finished" podID="e88d2996-c98f-4cce-b986-21611b1de03a" containerID="eacb28bd8bccfb0bfac88f9d325021989c4619170d67394f09cc80e69a9880a8" exitCode=0 Oct 08 07:19:28 crc kubenswrapper[4693]: I1008 07:19:28.753179 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb29z" event={"ID":"e88d2996-c98f-4cce-b986-21611b1de03a","Type":"ContainerDied","Data":"eacb28bd8bccfb0bfac88f9d325021989c4619170d67394f09cc80e69a9880a8"} Oct 08 07:19:29 crc kubenswrapper[4693]: I1008 07:19:29.601757 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:29 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:29 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:29 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:29 crc kubenswrapper[4693]: I1008 07:19:29.602363 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:29 crc kubenswrapper[4693]: I1008 07:19:29.763009 4693 generic.go:334] "Generic (PLEG): container finished" podID="2a08a011-72cb-473d-b8c8-3357b34e680f" 
containerID="16a9b8bf3a51a8d94da2019ea62b31fa7e64217f5ee0ea6075bd3ee5e14b2459" exitCode=0 Oct 08 07:19:29 crc kubenswrapper[4693]: I1008 07:19:29.763060 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2a08a011-72cb-473d-b8c8-3357b34e680f","Type":"ContainerDied","Data":"16a9b8bf3a51a8d94da2019ea62b31fa7e64217f5ee0ea6075bd3ee5e14b2459"} Oct 08 07:19:30 crc kubenswrapper[4693]: I1008 07:19:30.601795 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:30 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:30 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:30 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:30 crc kubenswrapper[4693]: I1008 07:19:30.601952 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:30 crc kubenswrapper[4693]: I1008 07:19:30.907550 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:30 crc kubenswrapper[4693]: I1008 07:19:30.914279 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-t484z" Oct 08 07:19:31 crc kubenswrapper[4693]: I1008 07:19:31.601827 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:31 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:31 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:31 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:31 crc kubenswrapper[4693]: I1008 07:19:31.602551 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:32 crc kubenswrapper[4693]: I1008 07:19:32.083411 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-sgmv4" Oct 08 07:19:32 crc kubenswrapper[4693]: I1008 07:19:32.604022 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:32 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:32 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:32 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:32 crc kubenswrapper[4693]: I1008 07:19:32.604127 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:33 crc kubenswrapper[4693]: I1008 07:19:33.601971 4693 patch_prober.go:28] interesting 
pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:33 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:33 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:33 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:33 crc kubenswrapper[4693]: I1008 07:19:33.602050 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:34 crc kubenswrapper[4693]: I1008 07:19:34.600427 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:34 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:34 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:34 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:34 crc kubenswrapper[4693]: I1008 07:19:34.600957 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:35 crc kubenswrapper[4693]: I1008 07:19:35.391717 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 08 07:19:35 crc kubenswrapper[4693]: I1008 07:19:35.510431 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a08a011-72cb-473d-b8c8-3357b34e680f-kubelet-dir\") pod \"2a08a011-72cb-473d-b8c8-3357b34e680f\" (UID: \"2a08a011-72cb-473d-b8c8-3357b34e680f\") " Oct 08 07:19:35 crc kubenswrapper[4693]: I1008 07:19:35.510555 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a08a011-72cb-473d-b8c8-3357b34e680f-kube-api-access\") pod \"2a08a011-72cb-473d-b8c8-3357b34e680f\" (UID: \"2a08a011-72cb-473d-b8c8-3357b34e680f\") " Oct 08 07:19:35 crc kubenswrapper[4693]: I1008 07:19:35.510590 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2a08a011-72cb-473d-b8c8-3357b34e680f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2a08a011-72cb-473d-b8c8-3357b34e680f" (UID: "2a08a011-72cb-473d-b8c8-3357b34e680f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:19:35 crc kubenswrapper[4693]: I1008 07:19:35.510803 4693 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a08a011-72cb-473d-b8c8-3357b34e680f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 08 07:19:35 crc kubenswrapper[4693]: I1008 07:19:35.519021 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a08a011-72cb-473d-b8c8-3357b34e680f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2a08a011-72cb-473d-b8c8-3357b34e680f" (UID: "2a08a011-72cb-473d-b8c8-3357b34e680f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:19:35 crc kubenswrapper[4693]: I1008 07:19:35.601235 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:35 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:35 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:35 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:35 crc kubenswrapper[4693]: I1008 07:19:35.601324 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:35 crc kubenswrapper[4693]: I1008 07:19:35.612306 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a08a011-72cb-473d-b8c8-3357b34e680f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 08 07:19:35 crc kubenswrapper[4693]: I1008 07:19:35.851060 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2a08a011-72cb-473d-b8c8-3357b34e680f","Type":"ContainerDied","Data":"02abfc064b62b987eef3135e2ec90087be9ae5663cc2dd0bd45689f0610eb716"} Oct 08 07:19:35 crc kubenswrapper[4693]: I1008 07:19:35.851402 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02abfc064b62b987eef3135e2ec90087be9ae5663cc2dd0bd45689f0610eb716" Oct 08 07:19:35 crc kubenswrapper[4693]: I1008 07:19:35.851581 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 08 07:19:36 crc kubenswrapper[4693]: I1008 07:19:36.031915 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-pwbtw" Oct 08 07:19:36 crc kubenswrapper[4693]: I1008 07:19:36.601872 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:36 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:36 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:36 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:36 crc kubenswrapper[4693]: I1008 07:19:36.601961 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:36 crc kubenswrapper[4693]: I1008 07:19:36.851237 4693 patch_prober.go:28] interesting pod/console-f9d7485db-x7lvf container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Oct 08 07:19:36 crc kubenswrapper[4693]: I1008 07:19:36.851521 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-x7lvf" podUID="c90da226-e34a-4d72-a64d-132a45439e4d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" Oct 08 07:19:37 crc kubenswrapper[4693]: I1008 07:19:37.600252 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:37 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:37 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:37 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:37 crc kubenswrapper[4693]: I1008 07:19:37.600915 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:38 crc kubenswrapper[4693]: I1008 07:19:38.568499 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:19:38 crc kubenswrapper[4693]: I1008 07:19:38.577208 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f68f540-8d3f-4081-8c7e-cd5023991ada-metrics-certs\") pod \"network-metrics-daemon-b2lbv\" (UID: \"0f68f540-8d3f-4081-8c7e-cd5023991ada\") " pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:19:38 crc kubenswrapper[4693]: I1008 07:19:38.603590 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router 
namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:38 crc kubenswrapper[4693]: [-]has-synced failed: reason withheld Oct 08 07:19:38 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:38 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:38 crc kubenswrapper[4693]: I1008 07:19:38.603691 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:38 crc kubenswrapper[4693]: I1008 07:19:38.706597 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-b2lbv" Oct 08 07:19:39 crc kubenswrapper[4693]: I1008 07:19:39.600056 4693 patch_prober.go:28] interesting pod/router-default-5444994796-blq4r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 08 07:19:39 crc kubenswrapper[4693]: [+]has-synced ok Oct 08 07:19:39 crc kubenswrapper[4693]: [+]process-running ok Oct 08 07:19:39 crc kubenswrapper[4693]: healthz check failed Oct 08 07:19:39 crc kubenswrapper[4693]: I1008 07:19:39.600130 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-blq4r" podUID="30132782-7481-4ce8-957f-759ea66a4eaf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 08 07:19:40 crc kubenswrapper[4693]: I1008 07:19:40.601609 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:40 crc kubenswrapper[4693]: I1008 07:19:40.608229 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-blq4r" Oct 08 07:19:44 crc kubenswrapper[4693]: I1008 07:19:44.975510 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" Oct 08 07:19:46 crc kubenswrapper[4693]: I1008 07:19:46.855682 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:46 crc kubenswrapper[4693]: I1008 07:19:46.861875 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.312437 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-b2lbv"] Oct 08 07:19:49 crc kubenswrapper[4693]: W1008 07:19:49.325484 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f68f540_8d3f_4081_8c7e_cd5023991ada.slice/crio-3cdf5fa18e6a7906cb11180736ec19458afa0073e95bd8a55bd9edf40bf2d4ce WatchSource:0}: Error finding container 3cdf5fa18e6a7906cb11180736ec19458afa0073e95bd8a55bd9edf40bf2d4ce: Status 404 returned error can't find the container with id 3cdf5fa18e6a7906cb11180736ec19458afa0073e95bd8a55bd9edf40bf2d4ce Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.955611 4693 generic.go:334] "Generic (PLEG): container finished" podID="31f9f4e1-d825-4a21-883a-c670744048a7" 
containerID="4cb0feee37f315f6c5b9fda5eb3d3e46f8ac03a940be94a1481209ce52720d81" exitCode=0 Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.955747 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxtd2" event={"ID":"31f9f4e1-d825-4a21-883a-c670744048a7","Type":"ContainerDied","Data":"4cb0feee37f315f6c5b9fda5eb3d3e46f8ac03a940be94a1481209ce52720d81"} Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.958669 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q242" event={"ID":"3163591b-81ce-4897-8170-961affb60344","Type":"ContainerStarted","Data":"f00c6d25251d1554db37f46dcb571f8d7728d9c6afb92784b8c1e756828d8f72"} Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.966582 4693 generic.go:334] "Generic (PLEG): container finished" podID="de56a156-ef96-4b18-bd73-d72965f7de18" containerID="db0961f2b2a5ab16e4118b0250e9588eae846c0ce11de2030be8aecfdd2a36f4" exitCode=0 Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.966660 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf677" event={"ID":"de56a156-ef96-4b18-bd73-d72965f7de18","Type":"ContainerDied","Data":"db0961f2b2a5ab16e4118b0250e9588eae846c0ce11de2030be8aecfdd2a36f4"} Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.970699 4693 generic.go:334] "Generic (PLEG): container finished" podID="ca170ba2-9128-4ba1-9084-c2373299a0d9" containerID="805d5cfb4b68a7ba51c51241c1f8bf4bea490a4e150f5242cb13feaa79b4ce0e" exitCode=0 Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.970930 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dd458" event={"ID":"ca170ba2-9128-4ba1-9084-c2373299a0d9","Type":"ContainerDied","Data":"805d5cfb4b68a7ba51c51241c1f8bf4bea490a4e150f5242cb13feaa79b4ce0e"} Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.978007 4693 generic.go:334] "Generic (PLEG): container finished" podID="b509fff2-7ab7-4e9a-9057-5fda5fa66f41" containerID="3deb2256b9154bb0a9d61ff34d0b6b5164caf480a45cdfc81ecaf117a17077d2" exitCode=0 Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.978108 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxdhl" event={"ID":"b509fff2-7ab7-4e9a-9057-5fda5fa66f41","Type":"ContainerDied","Data":"3deb2256b9154bb0a9d61ff34d0b6b5164caf480a45cdfc81ecaf117a17077d2"} Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.980444 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" event={"ID":"0f68f540-8d3f-4081-8c7e-cd5023991ada","Type":"ContainerStarted","Data":"3cdf5fa18e6a7906cb11180736ec19458afa0073e95bd8a55bd9edf40bf2d4ce"} Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.988582 4693 generic.go:334] "Generic (PLEG): container finished" podID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" containerID="f49c7d60130e7d4ea68cb91756e5874c619e5d7d2366538be1359eae491f9e34" exitCode=0 Oct 08 07:19:49 crc kubenswrapper[4693]: I1008 07:19:49.988740 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7x6qs" event={"ID":"b4f7b0f9-4902-4768-94ae-0c98e7814f9f","Type":"ContainerDied","Data":"f49c7d60130e7d4ea68cb91756e5874c619e5d7d2366538be1359eae491f9e34"} Oct 08 07:19:50 crc kubenswrapper[4693]: I1008 07:19:50.001081 4693 generic.go:334] "Generic (PLEG): container finished" podID="17d04184-b555-437c-b6bb-91a4e13263d8" 
containerID="4c06c293fd932571a548ce69a45cc19d01e4454e145a747fdddcf84df37176ac" exitCode=0 Oct 08 07:19:50 crc kubenswrapper[4693]: I1008 07:19:50.001235 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tz77" event={"ID":"17d04184-b555-437c-b6bb-91a4e13263d8","Type":"ContainerDied","Data":"4c06c293fd932571a548ce69a45cc19d01e4454e145a747fdddcf84df37176ac"} Oct 08 07:19:50 crc kubenswrapper[4693]: I1008 07:19:50.003721 4693 generic.go:334] "Generic (PLEG): container finished" podID="e88d2996-c98f-4cce-b986-21611b1de03a" containerID="f2723d854b7d6cac4e909cbddd890d114f0e093d5ecedeb7950839fad76dfa9a" exitCode=0 Oct 08 07:19:50 crc kubenswrapper[4693]: I1008 07:19:50.003781 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb29z" event={"ID":"e88d2996-c98f-4cce-b986-21611b1de03a","Type":"ContainerDied","Data":"f2723d854b7d6cac4e909cbddd890d114f0e093d5ecedeb7950839fad76dfa9a"} Oct 08 07:19:51 crc kubenswrapper[4693]: I1008 07:19:51.012612 4693 generic.go:334] "Generic (PLEG): container finished" podID="3163591b-81ce-4897-8170-961affb60344" containerID="f00c6d25251d1554db37f46dcb571f8d7728d9c6afb92784b8c1e756828d8f72" exitCode=0 Oct 08 07:19:51 crc kubenswrapper[4693]: I1008 07:19:51.012746 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q242" event={"ID":"3163591b-81ce-4897-8170-961affb60344","Type":"ContainerDied","Data":"f00c6d25251d1554db37f46dcb571f8d7728d9c6afb92784b8c1e756828d8f72"} Oct 08 07:19:51 crc kubenswrapper[4693]: I1008 07:19:51.019325 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" event={"ID":"0f68f540-8d3f-4081-8c7e-cd5023991ada","Type":"ContainerStarted","Data":"edb0c6ce6b7fb1d64f39c1b1f9d004c8ae9dcef30f1508e4f603bac620c50936"} Oct 08 07:19:51 crc kubenswrapper[4693]: I1008 07:19:51.019903 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-b2lbv" event={"ID":"0f68f540-8d3f-4081-8c7e-cd5023991ada","Type":"ContainerStarted","Data":"4a0c6f2569339d5c7ac9bec80c063e8a4779379b0f97df42dbf8c4824e708594"} Oct 08 07:19:52 crc kubenswrapper[4693]: I1008 07:19:52.070369 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-b2lbv" podStartSLOduration=157.070324795 podStartE2EDuration="2m37.070324795s" podCreationTimestamp="2025-10-08 07:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:19:52.064190245 +0000 UTC m=+177.435155180" watchObservedRunningTime="2025-10-08 07:19:52.070324795 +0000 UTC m=+177.441289770" Oct 08 07:19:53 crc kubenswrapper[4693]: I1008 07:19:53.040513 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7x6qs" event={"ID":"b4f7b0f9-4902-4768-94ae-0c98e7814f9f","Type":"ContainerStarted","Data":"8b3f1505abd1358fec56c095d9269dc35dd62ea06fb720bb637ac660c29ae637"} Oct 08 07:19:53 crc kubenswrapper[4693]: I1008 07:19:53.090554 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7x6qs" podStartSLOduration=3.503567672 podStartE2EDuration="30.090524563s" podCreationTimestamp="2025-10-08 07:19:23 +0000 UTC" firstStartedPulling="2025-10-08 07:19:25.584528327 +0000 UTC m=+150.955493262" lastFinishedPulling="2025-10-08 07:19:52.171485218 +0000 
UTC m=+177.542450153" observedRunningTime="2025-10-08 07:19:53.086470852 +0000 UTC m=+178.457435787" watchObservedRunningTime="2025-10-08 07:19:53.090524563 +0000 UTC m=+178.461489528"
Oct 08 07:19:53 crc kubenswrapper[4693]: I1008 07:19:53.489911 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:19:53 crc kubenswrapper[4693]: I1008 07:19:53.490410 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:19:53 crc kubenswrapper[4693]: I1008 07:19:53.812806 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7x6qs"
Oct 08 07:19:53 crc kubenswrapper[4693]: I1008 07:19:53.812912 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7x6qs"
Oct 08 07:19:55 crc kubenswrapper[4693]: I1008 07:19:55.062530 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxdhl" event={"ID":"b509fff2-7ab7-4e9a-9057-5fda5fa66f41","Type":"ContainerStarted","Data":"841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5"}
Oct 08 07:19:55 crc kubenswrapper[4693]: I1008 07:19:55.788905 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-7x6qs" podUID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" containerName="registry-server" probeResult="failure" output=<
Oct 08 07:19:55 crc kubenswrapper[4693]: timeout: failed to connect service ":50051" within 1s
Oct 08 07:19:55 crc kubenswrapper[4693]: >
Oct 08 07:19:56 crc kubenswrapper[4693]: I1008 07:19:56.101848 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rxdhl" podStartSLOduration=3.245172839 podStartE2EDuration="31.101785036s" podCreationTimestamp="2025-10-08 07:19:25 +0000 UTC" firstStartedPulling="2025-10-08 07:19:26.697925118 +0000 UTC m=+152.068890053" lastFinishedPulling="2025-10-08 07:19:54.554537315 +0000 UTC m=+179.925502250" observedRunningTime="2025-10-08 07:19:56.098424093 +0000 UTC m=+181.469389028" watchObservedRunningTime="2025-10-08 07:19:56.101785036 +0000 UTC m=+181.472750011"
Oct 08 07:19:57 crc kubenswrapper[4693]: I1008 07:19:57.051210 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kk6x6"
Oct 08 07:19:59 crc kubenswrapper[4693]: I1008 07:19:59.095168 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q242" event={"ID":"3163591b-81ce-4897-8170-961affb60344","Type":"ContainerStarted","Data":"6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979"}
Oct 08 07:20:00 crc kubenswrapper[4693]: I1008 07:20:00.126343 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2q242" podStartSLOduration=4.468402963 podStartE2EDuration="34.126315126s" podCreationTimestamp="2025-10-08 07:19:26 +0000 UTC" firstStartedPulling="2025-10-08 07:19:27.727058443 +0000 UTC m=+153.098023378" lastFinishedPulling="2025-10-08 07:19:57.384970576 +0000 UTC m=+182.755935541" observedRunningTime="2025-10-08 07:20:00.122787809 +0000 UTC m=+185.493752784" watchObservedRunningTime="2025-10-08 07:20:00.126315126 +0000 UTC m=+185.497280081"
Oct 08 07:20:03 crc kubenswrapper[4693]: I1008 07:20:03.497748 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 08 07:20:04 crc kubenswrapper[4693]: I1008 07:20:04.501421 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7x6qs"
Oct 08 07:20:04 crc kubenswrapper[4693]: I1008 07:20:04.572955 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7x6qs"
Oct 08 07:20:05 crc kubenswrapper[4693]: I1008 07:20:05.689301 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:20:05 crc kubenswrapper[4693]: I1008 07:20:05.689440 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:20:05 crc kubenswrapper[4693]: I1008 07:20:05.760127 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:20:06 crc kubenswrapper[4693]: I1008 07:20:06.273524 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rxdhl"
Oct 08 07:20:06 crc kubenswrapper[4693]: I1008 07:20:06.707418 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2q242"
Oct 08 07:20:06 crc kubenswrapper[4693]: I1008 07:20:06.707968 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2q242"
Oct 08 07:20:06 crc kubenswrapper[4693]: I1008 07:20:06.758396 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2q242"
Oct 08 07:20:07 crc kubenswrapper[4693]: I1008 07:20:07.178741 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxtd2" event={"ID":"31f9f4e1-d825-4a21-883a-c670744048a7","Type":"ContainerStarted","Data":"732167df2ba2fd6058dc74e439c22eefec85698550497aeab03c63642fc05856"}
Oct 08 07:20:07 crc kubenswrapper[4693]: I1008 07:20:07.181693 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf677" event={"ID":"de56a156-ef96-4b18-bd73-d72965f7de18","Type":"ContainerStarted","Data":"a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28"}
Oct 08 07:20:07 crc kubenswrapper[4693]: I1008 07:20:07.183295 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tz77" event={"ID":"17d04184-b555-437c-b6bb-91a4e13263d8","Type":"ContainerStarted","Data":"2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c"}
Oct 08 07:20:07 crc kubenswrapper[4693]: I1008 07:20:07.186534 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dd458" event={"ID":"ca170ba2-9128-4ba1-9084-c2373299a0d9","Type":"ContainerStarted","Data":"c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064"}
Oct 08 07:20:07 crc kubenswrapper[4693]: I1008 07:20:07.189869 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb29z" event={"ID":"e88d2996-c98f-4cce-b986-21611b1de03a","Type":"ContainerStarted","Data":"82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e"}
Oct 08 07:20:07 crc kubenswrapper[4693]: I1008 07:20:07.202281 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gxtd2" podStartSLOduration=4.373853802 podStartE2EDuration="42.202247788s" podCreationTimestamp="2025-10-08 07:19:25 +0000 UTC" firstStartedPulling="2025-10-08 07:19:26.704325874 +0000 UTC m=+152.075290809" lastFinishedPulling="2025-10-08 07:20:04.53271986 +0000 UTC m=+189.903684795" observedRunningTime="2025-10-08 07:20:07.200853829 +0000 UTC m=+192.571818764" watchObservedRunningTime="2025-10-08 07:20:07.202247788 +0000 UTC m=+192.573212723"
Oct 08 07:20:07 crc kubenswrapper[4693]: I1008 07:20:07.227512 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dd458" podStartSLOduration=4.0962465869999996 podStartE2EDuration="44.227491545s" podCreationTimestamp="2025-10-08 07:19:23 +0000 UTC" firstStartedPulling="2025-10-08 07:19:25.671776486 +0000 UTC m=+151.042741411" lastFinishedPulling="2025-10-08 07:20:05.803021404 +0000 UTC m=+191.173986369" observedRunningTime="2025-10-08 07:20:07.222752344 +0000 UTC m=+192.593717279" watchObservedRunningTime="2025-10-08 07:20:07.227491545 +0000 UTC m=+192.598456480"
Oct 08 07:20:07 crc kubenswrapper[4693]: I1008 07:20:07.237991 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2q242"
Oct 08 07:20:07 crc kubenswrapper[4693]: I1008 07:20:07.247379 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sf677" podStartSLOduration=4.05871886 podStartE2EDuration="44.247345703s" podCreationTimestamp="2025-10-08 07:19:23 +0000 UTC" firstStartedPulling="2025-10-08 07:19:25.637062117 +0000 UTC m=+151.008027052" lastFinishedPulling="2025-10-08 07:20:05.82568896 +0000 UTC m=+191.196653895" observedRunningTime="2025-10-08 07:20:07.244181666 +0000 UTC m=+192.615146601" watchObservedRunningTime="2025-10-08 07:20:07.247345703 +0000 UTC m=+192.618310628"
Oct 08 07:20:07 crc kubenswrapper[4693]: I1008 07:20:07.276736 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jb29z" podStartSLOduration=4.195404087 podStartE2EDuration="41.276713864s" podCreationTimestamp="2025-10-08 07:19:26 +0000 UTC" firstStartedPulling="2025-10-08 07:19:28.756668992 +0000 UTC m=+154.127633927" lastFinishedPulling="2025-10-08 07:20:05.837978729 +0000 UTC m=+191.208943704" observedRunningTime="2025-10-08 07:20:07.269281189 +0000 UTC m=+192.640246134" watchObservedRunningTime="2025-10-08 07:20:07.276713864 +0000 UTC m=+192.647678799"
Oct 08 07:20:07 crc kubenswrapper[4693]: I1008 07:20:07.292910 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4tz77" podStartSLOduration=4.15467234 podStartE2EDuration="44.29287784s" podCreationTimestamp="2025-10-08 07:19:23 +0000 UTC" firstStartedPulling="2025-10-08 07:19:25.655702762 +0000 UTC m=+151.026667697" lastFinishedPulling="2025-10-08 07:20:05.793908232 +0000 UTC m=+191.164873197" observedRunningTime="2025-10-08 07:20:07.291886243 +0000 UTC m=+192.662851178" watchObservedRunningTime="2025-10-08 07:20:07.29287784 +0000 UTC m=+192.663842775"
Oct 08 07:20:13 crc kubenswrapper[4693]: I1008 07:20:13.490362 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dd458"
Oct 08 07:20:13 crc kubenswrapper[4693]: I1008 07:20:13.491342 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dd458"
Oct 08 07:20:13 crc kubenswrapper[4693]: I1008 07:20:13.568765 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dd458"
Oct 08 07:20:13 crc kubenswrapper[4693]: I1008 07:20:13.896899 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:20:13 crc kubenswrapper[4693]: I1008 07:20:13.896978 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:20:13 crc kubenswrapper[4693]: I1008 07:20:13.947025 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:20:14 crc kubenswrapper[4693]: I1008 07:20:14.085875 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:20:14 crc kubenswrapper[4693]: I1008 07:20:14.085952 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:20:14 crc kubenswrapper[4693]: I1008 07:20:14.142580 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:20:14 crc kubenswrapper[4693]: I1008 07:20:14.279113 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:20:14 crc kubenswrapper[4693]: I1008 07:20:14.299741 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:20:14 crc kubenswrapper[4693]: I1008 07:20:14.301453 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dd458"
Oct 08 07:20:15 crc kubenswrapper[4693]: I1008 07:20:15.946799 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4tz77"]
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.102957 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gxtd2"
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.104094 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gxtd2"
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.167992 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gxtd2"
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.254037 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4tz77" podUID="17d04184-b555-437c-b6bb-91a4e13263d8" containerName="registry-server" containerID="cri-o://2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c" gracePeriod=2
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.325715 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gxtd2"
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.741721 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.896719 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hb8ff\" (UniqueName: \"kubernetes.io/projected/17d04184-b555-437c-b6bb-91a4e13263d8-kube-api-access-hb8ff\") pod \"17d04184-b555-437c-b6bb-91a4e13263d8\" (UID: \"17d04184-b555-437c-b6bb-91a4e13263d8\") "
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.896785 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17d04184-b555-437c-b6bb-91a4e13263d8-utilities\") pod \"17d04184-b555-437c-b6bb-91a4e13263d8\" (UID: \"17d04184-b555-437c-b6bb-91a4e13263d8\") "
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.896981 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17d04184-b555-437c-b6bb-91a4e13263d8-catalog-content\") pod \"17d04184-b555-437c-b6bb-91a4e13263d8\" (UID: \"17d04184-b555-437c-b6bb-91a4e13263d8\") "
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.898680 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17d04184-b555-437c-b6bb-91a4e13263d8-utilities" (OuterVolumeSpecName: "utilities") pod "17d04184-b555-437c-b6bb-91a4e13263d8" (UID: "17d04184-b555-437c-b6bb-91a4e13263d8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.906220 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17d04184-b555-437c-b6bb-91a4e13263d8-kube-api-access-hb8ff" (OuterVolumeSpecName: "kube-api-access-hb8ff") pod "17d04184-b555-437c-b6bb-91a4e13263d8" (UID: "17d04184-b555-437c-b6bb-91a4e13263d8"). InnerVolumeSpecName "kube-api-access-hb8ff". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.960754 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17d04184-b555-437c-b6bb-91a4e13263d8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "17d04184-b555-437c-b6bb-91a4e13263d8" (UID: "17d04184-b555-437c-b6bb-91a4e13263d8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.998297 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17d04184-b555-437c-b6bb-91a4e13263d8-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.998329 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb8ff\" (UniqueName: \"kubernetes.io/projected/17d04184-b555-437c-b6bb-91a4e13263d8-kube-api-access-hb8ff\") on node \"crc\" DevicePath \"\""
Oct 08 07:20:16 crc kubenswrapper[4693]: I1008 07:20:16.998344 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17d04184-b555-437c-b6bb-91a4e13263d8-utilities\") on node \"crc\" DevicePath \"\""
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.116307 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jb29z"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.116412 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jb29z"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.194791 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jb29z"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.262351 4693 generic.go:334] "Generic (PLEG): container finished" podID="17d04184-b555-437c-b6bb-91a4e13263d8" containerID="2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c" exitCode=0
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.262505 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tz77"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.262478 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tz77" event={"ID":"17d04184-b555-437c-b6bb-91a4e13263d8","Type":"ContainerDied","Data":"2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c"}
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.262584 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tz77" event={"ID":"17d04184-b555-437c-b6bb-91a4e13263d8","Type":"ContainerDied","Data":"c390e642beeb5048050daf9dbc488c83577d112c3356cc9de27452a0e8279dd3"}
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.262616 4693 scope.go:117] "RemoveContainer" containerID="2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.288529 4693 scope.go:117] "RemoveContainer" containerID="4c06c293fd932571a548ce69a45cc19d01e4454e145a747fdddcf84df37176ac"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.301911 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4tz77"]
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.304933 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4tz77"]
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.308684 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jb29z"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.335790 4693 scope.go:117] "RemoveContainer" containerID="21f86095c1730e57a02dec6af25d5b9e2c0a71d70e7c9d7f4798531b1349813f"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.351639 4693 scope.go:117] "RemoveContainer" containerID="2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c"
Oct 08 07:20:17 crc kubenswrapper[4693]: E1008 07:20:17.352343 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c\": container with ID starting with 2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c not found: ID does not exist" containerID="2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.352383 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c"} err="failed to get container status \"2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c\": rpc error: code = NotFound desc = could not find container \"2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c\": container with ID starting with 2001dad91e8b71ab96a34f31b3b01761492eda870d3c7b1196b293f62e2da44c not found: ID does not exist"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.352431 4693 scope.go:117] "RemoveContainer" containerID="4c06c293fd932571a548ce69a45cc19d01e4454e145a747fdddcf84df37176ac"
Oct 08 07:20:17 crc kubenswrapper[4693]: E1008 07:20:17.353011 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c06c293fd932571a548ce69a45cc19d01e4454e145a747fdddcf84df37176ac\": container with ID starting with 4c06c293fd932571a548ce69a45cc19d01e4454e145a747fdddcf84df37176ac not found: ID does not exist" containerID="4c06c293fd932571a548ce69a45cc19d01e4454e145a747fdddcf84df37176ac"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.353042 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c06c293fd932571a548ce69a45cc19d01e4454e145a747fdddcf84df37176ac"} err="failed to get container status \"4c06c293fd932571a548ce69a45cc19d01e4454e145a747fdddcf84df37176ac\": rpc error: code = NotFound desc = could not find container \"4c06c293fd932571a548ce69a45cc19d01e4454e145a747fdddcf84df37176ac\": container with ID starting with 4c06c293fd932571a548ce69a45cc19d01e4454e145a747fdddcf84df37176ac not found: ID does not exist"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.353060 4693 scope.go:117] "RemoveContainer" containerID="21f86095c1730e57a02dec6af25d5b9e2c0a71d70e7c9d7f4798531b1349813f"
Oct 08 07:20:17 crc kubenswrapper[4693]: E1008 07:20:17.353357 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21f86095c1730e57a02dec6af25d5b9e2c0a71d70e7c9d7f4798531b1349813f\": container with ID starting with 21f86095c1730e57a02dec6af25d5b9e2c0a71d70e7c9d7f4798531b1349813f not found: ID does not exist" containerID="21f86095c1730e57a02dec6af25d5b9e2c0a71d70e7c9d7f4798531b1349813f"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.353404 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21f86095c1730e57a02dec6af25d5b9e2c0a71d70e7c9d7f4798531b1349813f"} err="failed to get container status \"21f86095c1730e57a02dec6af25d5b9e2c0a71d70e7c9d7f4798531b1349813f\": rpc error: code = NotFound desc = could not find container \"21f86095c1730e57a02dec6af25d5b9e2c0a71d70e7c9d7f4798531b1349813f\": container with ID starting with 21f86095c1730e57a02dec6af25d5b9e2c0a71d70e7c9d7f4798531b1349813f not found: ID does not exist"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.370352 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17d04184-b555-437c-b6bb-91a4e13263d8" path="/var/lib/kubelet/pods/17d04184-b555-437c-b6bb-91a4e13263d8/volumes"
Oct 08 07:20:17 crc kubenswrapper[4693]: I1008 07:20:17.759116 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jb29z"]
Oct 08 07:20:18 crc kubenswrapper[4693]: I1008 07:20:18.362689 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sf677"]
Oct 08 07:20:18 crc kubenswrapper[4693]: I1008 07:20:18.363689 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sf677" podUID="de56a156-ef96-4b18-bd73-d72965f7de18" containerName="registry-server" containerID="cri-o://a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28" gracePeriod=2
Oct 08 07:20:18 crc kubenswrapper[4693]: I1008 07:20:18.773539 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:20:18 crc kubenswrapper[4693]: I1008 07:20:18.925299 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m22w8\" (UniqueName: \"kubernetes.io/projected/de56a156-ef96-4b18-bd73-d72965f7de18-kube-api-access-m22w8\") pod \"de56a156-ef96-4b18-bd73-d72965f7de18\" (UID: \"de56a156-ef96-4b18-bd73-d72965f7de18\") "
Oct 08 07:20:18 crc kubenswrapper[4693]: I1008 07:20:18.925397 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de56a156-ef96-4b18-bd73-d72965f7de18-utilities\") pod \"de56a156-ef96-4b18-bd73-d72965f7de18\" (UID: \"de56a156-ef96-4b18-bd73-d72965f7de18\") "
Oct 08 07:20:18 crc kubenswrapper[4693]: I1008 07:20:18.925435 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de56a156-ef96-4b18-bd73-d72965f7de18-catalog-content\") pod \"de56a156-ef96-4b18-bd73-d72965f7de18\" (UID: \"de56a156-ef96-4b18-bd73-d72965f7de18\") "
Oct 08 07:20:18 crc kubenswrapper[4693]: I1008 07:20:18.926244 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de56a156-ef96-4b18-bd73-d72965f7de18-utilities" (OuterVolumeSpecName: "utilities") pod "de56a156-ef96-4b18-bd73-d72965f7de18" (UID: "de56a156-ef96-4b18-bd73-d72965f7de18"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:20:18 crc kubenswrapper[4693]: I1008 07:20:18.931797 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de56a156-ef96-4b18-bd73-d72965f7de18-kube-api-access-m22w8" (OuterVolumeSpecName: "kube-api-access-m22w8") pod "de56a156-ef96-4b18-bd73-d72965f7de18" (UID: "de56a156-ef96-4b18-bd73-d72965f7de18"). InnerVolumeSpecName "kube-api-access-m22w8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:20:18 crc kubenswrapper[4693]: I1008 07:20:18.971223 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de56a156-ef96-4b18-bd73-d72965f7de18-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de56a156-ef96-4b18-bd73-d72965f7de18" (UID: "de56a156-ef96-4b18-bd73-d72965f7de18"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.027567 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m22w8\" (UniqueName: \"kubernetes.io/projected/de56a156-ef96-4b18-bd73-d72965f7de18-kube-api-access-m22w8\") on node \"crc\" DevicePath \"\""
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.027602 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de56a156-ef96-4b18-bd73-d72965f7de18-utilities\") on node \"crc\" DevicePath \"\""
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.027611 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de56a156-ef96-4b18-bd73-d72965f7de18-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.285101 4693 generic.go:334] "Generic (PLEG): container finished" podID="de56a156-ef96-4b18-bd73-d72965f7de18" containerID="a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28" exitCode=0
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.285419 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jb29z" podUID="e88d2996-c98f-4cce-b986-21611b1de03a" containerName="registry-server" containerID="cri-o://82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e" gracePeriod=2
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.285550 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sf677"
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.286911 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf677" event={"ID":"de56a156-ef96-4b18-bd73-d72965f7de18","Type":"ContainerDied","Data":"a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28"}
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.286952 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf677" event={"ID":"de56a156-ef96-4b18-bd73-d72965f7de18","Type":"ContainerDied","Data":"aa3db492c97e65561096befd99f1bc3cb7c7f5ef7abb71ca0a2d0485bf4c99a1"}
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.286975 4693 scope.go:117] "RemoveContainer" containerID="a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28"
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.321643 4693 scope.go:117] "RemoveContainer" containerID="db0961f2b2a5ab16e4118b0250e9588eae846c0ce11de2030be8aecfdd2a36f4"
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.331350 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sf677"]
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.334168 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sf677"]
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.350978 4693 scope.go:117] "RemoveContainer" containerID="0023709637a207d88d3649bcbb3cb0634bfe92e873af8164e0a83211e9e1f7ee"
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.370572 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de56a156-ef96-4b18-bd73-d72965f7de18" path="/var/lib/kubelet/pods/de56a156-ef96-4b18-bd73-d72965f7de18/volumes"
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.453921 4693 scope.go:117] "RemoveContainer" containerID="a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28"
Oct 08 07:20:19 crc kubenswrapper[4693]: E1008 07:20:19.454547 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28\": container with ID starting with a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28 not found: ID does not exist" containerID="a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28"
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.454719 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28"} err="failed to get container status \"a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28\": rpc error: code = NotFound desc = could not find container \"a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28\": container with ID starting with a32cc4ab9e46860e48a0e42901efca173db6777e12a994497285906ffaf43b28 not found: ID does not exist"
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.454898 4693 scope.go:117] "RemoveContainer" containerID="db0961f2b2a5ab16e4118b0250e9588eae846c0ce11de2030be8aecfdd2a36f4"
Oct 08 07:20:19 crc kubenswrapper[4693]: E1008 07:20:19.455730 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db0961f2b2a5ab16e4118b0250e9588eae846c0ce11de2030be8aecfdd2a36f4\": container with ID starting with db0961f2b2a5ab16e4118b0250e9588eae846c0ce11de2030be8aecfdd2a36f4 not found: ID does not exist" containerID="db0961f2b2a5ab16e4118b0250e9588eae846c0ce11de2030be8aecfdd2a36f4"
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.455772 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db0961f2b2a5ab16e4118b0250e9588eae846c0ce11de2030be8aecfdd2a36f4"} err="failed to get container status \"db0961f2b2a5ab16e4118b0250e9588eae846c0ce11de2030be8aecfdd2a36f4\": rpc error: code = NotFound desc = could not find container \"db0961f2b2a5ab16e4118b0250e9588eae846c0ce11de2030be8aecfdd2a36f4\": container with ID starting with db0961f2b2a5ab16e4118b0250e9588eae846c0ce11de2030be8aecfdd2a36f4 not found: ID does not exist"
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.455800 4693 scope.go:117] "RemoveContainer" containerID="0023709637a207d88d3649bcbb3cb0634bfe92e873af8164e0a83211e9e1f7ee"
Oct 08 07:20:19 crc kubenswrapper[4693]: E1008 07:20:19.456070 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0023709637a207d88d3649bcbb3cb0634bfe92e873af8164e0a83211e9e1f7ee\": container with ID starting with 0023709637a207d88d3649bcbb3cb0634bfe92e873af8164e0a83211e9e1f7ee not found: ID does not exist" containerID="0023709637a207d88d3649bcbb3cb0634bfe92e873af8164e0a83211e9e1f7ee"
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.456105 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0023709637a207d88d3649bcbb3cb0634bfe92e873af8164e0a83211e9e1f7ee"} err="failed to get container status \"0023709637a207d88d3649bcbb3cb0634bfe92e873af8164e0a83211e9e1f7ee\": rpc error: code = NotFound desc = could not find container \"0023709637a207d88d3649bcbb3cb0634bfe92e873af8164e0a83211e9e1f7ee\": container with ID starting with 0023709637a207d88d3649bcbb3cb0634bfe92e873af8164e0a83211e9e1f7ee not found: ID does not exist"
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.778217 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jb29z"
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.840090 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e88d2996-c98f-4cce-b986-21611b1de03a-catalog-content\") pod \"e88d2996-c98f-4cce-b986-21611b1de03a\" (UID: \"e88d2996-c98f-4cce-b986-21611b1de03a\") "
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.840239 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e88d2996-c98f-4cce-b986-21611b1de03a-utilities\") pod \"e88d2996-c98f-4cce-b986-21611b1de03a\" (UID: \"e88d2996-c98f-4cce-b986-21611b1de03a\") "
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.840287 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgt7h\" (UniqueName: \"kubernetes.io/projected/e88d2996-c98f-4cce-b986-21611b1de03a-kube-api-access-wgt7h\") pod \"e88d2996-c98f-4cce-b986-21611b1de03a\" (UID: \"e88d2996-c98f-4cce-b986-21611b1de03a\") "
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.841292 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e88d2996-c98f-4cce-b986-21611b1de03a-utilities" (OuterVolumeSpecName: "utilities") pod "e88d2996-c98f-4cce-b986-21611b1de03a" (UID: "e88d2996-c98f-4cce-b986-21611b1de03a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.850842 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e88d2996-c98f-4cce-b986-21611b1de03a-kube-api-access-wgt7h" (OuterVolumeSpecName: "kube-api-access-wgt7h") pod "e88d2996-c98f-4cce-b986-21611b1de03a" (UID: "e88d2996-c98f-4cce-b986-21611b1de03a"). InnerVolumeSpecName "kube-api-access-wgt7h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.942251 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e88d2996-c98f-4cce-b986-21611b1de03a-utilities\") on node \"crc\" DevicePath \"\""
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.942295 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgt7h\" (UniqueName: \"kubernetes.io/projected/e88d2996-c98f-4cce-b986-21611b1de03a-kube-api-access-wgt7h\") on node \"crc\" DevicePath \"\""
Oct 08 07:20:19 crc kubenswrapper[4693]: I1008 07:20:19.953284 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e88d2996-c98f-4cce-b986-21611b1de03a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e88d2996-c98f-4cce-b986-21611b1de03a" (UID: "e88d2996-c98f-4cce-b986-21611b1de03a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.043702 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e88d2996-c98f-4cce-b986-21611b1de03a-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.163685 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gxtd2"]
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.164415 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gxtd2" podUID="31f9f4e1-d825-4a21-883a-c670744048a7" containerName="registry-server" containerID="cri-o://732167df2ba2fd6058dc74e439c22eefec85698550497aeab03c63642fc05856" gracePeriod=2
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.291743 4693 generic.go:334] "Generic (PLEG): container finished" podID="31f9f4e1-d825-4a21-883a-c670744048a7" containerID="732167df2ba2fd6058dc74e439c22eefec85698550497aeab03c63642fc05856" exitCode=0
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.291838 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxtd2" event={"ID":"31f9f4e1-d825-4a21-883a-c670744048a7","Type":"ContainerDied","Data":"732167df2ba2fd6058dc74e439c22eefec85698550497aeab03c63642fc05856"}
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.296366 4693 generic.go:334] "Generic (PLEG): container finished" podID="e88d2996-c98f-4cce-b986-21611b1de03a" containerID="82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e" exitCode=0
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.296427 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb29z" event={"ID":"e88d2996-c98f-4cce-b986-21611b1de03a","Type":"ContainerDied","Data":"82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e"}
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.296511 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jb29z"
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.296564 4693 scope.go:117] "RemoveContainer" containerID="82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e"
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.296519 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb29z" event={"ID":"e88d2996-c98f-4cce-b986-21611b1de03a","Type":"ContainerDied","Data":"bf775062815544824fbd6b8af4fc1c183110d270d9636af0a7f64a89b432e0c5"}
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.319573 4693 scope.go:117] "RemoveContainer" containerID="f2723d854b7d6cac4e909cbddd890d114f0e093d5ecedeb7950839fad76dfa9a"
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.339753 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jb29z"]
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.345578 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jb29z"]
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.361887 4693 scope.go:117] "RemoveContainer" containerID="eacb28bd8bccfb0bfac88f9d325021989c4619170d67394f09cc80e69a9880a8"
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.380141 4693 scope.go:117] "RemoveContainer" containerID="82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e"
Oct 08 07:20:20 crc kubenswrapper[4693]: E1008 07:20:20.380653 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e\": container with ID starting with 82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e not found: ID does not exist" containerID="82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e"
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.380692 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e"} err="failed to get container status \"82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e\": rpc error: code = NotFound desc = could not find container \"82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e\": container with ID starting with 82d6b039f829f27680339d3f97d5a3a1ec7f7c62d60e9d9f1f2cd2718ff8fc6e not found: ID does not exist"
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.380724 4693 scope.go:117] "RemoveContainer" containerID="f2723d854b7d6cac4e909cbddd890d114f0e093d5ecedeb7950839fad76dfa9a"
Oct 08 07:20:20 crc kubenswrapper[4693]: E1008 07:20:20.381006 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2723d854b7d6cac4e909cbddd890d114f0e093d5ecedeb7950839fad76dfa9a\": container with ID starting with f2723d854b7d6cac4e909cbddd890d114f0e093d5ecedeb7950839fad76dfa9a not found: ID does not exist" containerID="f2723d854b7d6cac4e909cbddd890d114f0e093d5ecedeb7950839fad76dfa9a"
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.381025 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2723d854b7d6cac4e909cbddd890d114f0e093d5ecedeb7950839fad76dfa9a"} err="failed to get container status \"f2723d854b7d6cac4e909cbddd890d114f0e093d5ecedeb7950839fad76dfa9a\": rpc error: code = NotFound desc = could not find container \"f2723d854b7d6cac4e909cbddd890d114f0e093d5ecedeb7950839fad76dfa9a\": container with ID starting with f2723d854b7d6cac4e909cbddd890d114f0e093d5ecedeb7950839fad76dfa9a not found: ID does not exist"
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.381039 4693 scope.go:117] "RemoveContainer" containerID="eacb28bd8bccfb0bfac88f9d325021989c4619170d67394f09cc80e69a9880a8"
Oct 08 07:20:20 crc kubenswrapper[4693]: E1008 07:20:20.381377 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eacb28bd8bccfb0bfac88f9d325021989c4619170d67394f09cc80e69a9880a8\": container with ID starting with eacb28bd8bccfb0bfac88f9d325021989c4619170d67394f09cc80e69a9880a8 not found: ID does not exist" containerID="eacb28bd8bccfb0bfac88f9d325021989c4619170d67394f09cc80e69a9880a8"
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.381439 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eacb28bd8bccfb0bfac88f9d325021989c4619170d67394f09cc80e69a9880a8"} err="failed to get container status \"eacb28bd8bccfb0bfac88f9d325021989c4619170d67394f09cc80e69a9880a8\": rpc error: code = NotFound desc = could not find container \"eacb28bd8bccfb0bfac88f9d325021989c4619170d67394f09cc80e69a9880a8\": container with ID starting with eacb28bd8bccfb0bfac88f9d325021989c4619170d67394f09cc80e69a9880a8 not found: ID does not exist"
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.580177 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gxtd2"
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.654537 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31f9f4e1-d825-4a21-883a-c670744048a7-utilities\") pod \"31f9f4e1-d825-4a21-883a-c670744048a7\" (UID: \"31f9f4e1-d825-4a21-883a-c670744048a7\") "
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.654678 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31f9f4e1-d825-4a21-883a-c670744048a7-catalog-content\") pod \"31f9f4e1-d825-4a21-883a-c670744048a7\" (UID: \"31f9f4e1-d825-4a21-883a-c670744048a7\") "
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.654749 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vntk6\" (UniqueName: \"kubernetes.io/projected/31f9f4e1-d825-4a21-883a-c670744048a7-kube-api-access-vntk6\") pod \"31f9f4e1-d825-4a21-883a-c670744048a7\" (UID: \"31f9f4e1-d825-4a21-883a-c670744048a7\") "
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.655549 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31f9f4e1-d825-4a21-883a-c670744048a7-utilities" (OuterVolumeSpecName: "utilities") pod "31f9f4e1-d825-4a21-883a-c670744048a7" (UID: "31f9f4e1-d825-4a21-883a-c670744048a7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.661657 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31f9f4e1-d825-4a21-883a-c670744048a7-kube-api-access-vntk6" (OuterVolumeSpecName: "kube-api-access-vntk6") pod "31f9f4e1-d825-4a21-883a-c670744048a7" (UID: "31f9f4e1-d825-4a21-883a-c670744048a7"). InnerVolumeSpecName "kube-api-access-vntk6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.673974 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31f9f4e1-d825-4a21-883a-c670744048a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "31f9f4e1-d825-4a21-883a-c670744048a7" (UID: "31f9f4e1-d825-4a21-883a-c670744048a7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.756156 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31f9f4e1-d825-4a21-883a-c670744048a7-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.756198 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vntk6\" (UniqueName: \"kubernetes.io/projected/31f9f4e1-d825-4a21-883a-c670744048a7-kube-api-access-vntk6\") on node \"crc\" DevicePath \"\""
Oct 08 07:20:20 crc kubenswrapper[4693]: I1008 07:20:20.756211 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31f9f4e1-d825-4a21-883a-c670744048a7-utilities\") on node \"crc\" DevicePath \"\""
Oct 08 07:20:21 crc kubenswrapper[4693]: I1008 07:20:21.308784 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxtd2" event={"ID":"31f9f4e1-d825-4a21-883a-c670744048a7","Type":"ContainerDied","Data":"0dbecbdf699404d6fae2f2fa7c2c01365aae7ed94eaf5a33e38c4dcd26acf278"}
Oct 08 07:20:21 crc kubenswrapper[4693]: I1008 07:20:21.309275 4693 scope.go:117] "RemoveContainer" containerID="732167df2ba2fd6058dc74e439c22eefec85698550497aeab03c63642fc05856"
Oct 08 07:20:21 crc kubenswrapper[4693]: I1008 07:20:21.308951 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gxtd2"
Oct 08 07:20:21 crc kubenswrapper[4693]: I1008 07:20:21.339002 4693 scope.go:117] "RemoveContainer" containerID="4cb0feee37f315f6c5b9fda5eb3d3e46f8ac03a940be94a1481209ce52720d81"
Oct 08 07:20:21 crc kubenswrapper[4693]: I1008 07:20:21.351906 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gxtd2"]
Oct 08 07:20:21 crc kubenswrapper[4693]: I1008 07:20:21.358712 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gxtd2"]
Oct 08 07:20:21 crc kubenswrapper[4693]: I1008 07:20:21.372142 4693 scope.go:117] "RemoveContainer" containerID="5d01e65d4f39165d8a03c8497db464e3892824ca7733a782d567ba584108e43c"
Oct 08 07:20:21 crc kubenswrapper[4693]: I1008 07:20:21.378125 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31f9f4e1-d825-4a21-883a-c670744048a7" path="/var/lib/kubelet/pods/31f9f4e1-d825-4a21-883a-c670744048a7/volumes"
Oct 08 07:20:21 crc kubenswrapper[4693]: I1008 07:20:21.379257 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e88d2996-c98f-4cce-b986-21611b1de03a" path="/var/lib/kubelet/pods/e88d2996-c98f-4cce-b986-21611b1de03a/volumes"
Oct 08 07:20:23 crc kubenswrapper[4693]: I1008 07:20:23.490226 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:20:23 crc kubenswrapper[4693]: I1008 07:20:23.490711 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:20:23 crc kubenswrapper[4693]: I1008 07:20:23.490784 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr"
Oct 08 07:20:23 crc kubenswrapper[4693]: I1008 07:20:23.491494 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 08 07:20:23 crc kubenswrapper[4693]: I1008 07:20:23.491560 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75" gracePeriod=600
Oct 08 07:20:24 crc kubenswrapper[4693]: I1008 07:20:24.355233 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75" exitCode=0
Oct 08 07:20:24 crc kubenswrapper[4693]: I1008 07:20:24.355362 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75"}
Oct 08 07:20:24 crc kubenswrapper[4693]: I1008 07:20:24.356128 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"ddfcf64630e8b7f10b94986e58e5e52c512871e23889b5f7b1fc3041780912b6"}
Oct 08 07:20:29 crc kubenswrapper[4693]: I1008 07:20:29.725579 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rjk9l"]
Oct 08 07:20:54 crc kubenswrapper[4693]: I1008 07:20:54.762746 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" podUID="e9a6efb0-34a1-4419-a097-14877cb1371c" containerName="oauth-openshift" containerID="cri-o://024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f" gracePeriod=15
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.201144 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.238747 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"]
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239500 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de56a156-ef96-4b18-bd73-d72965f7de18" containerName="extract-content"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239520 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="de56a156-ef96-4b18-bd73-d72965f7de18" containerName="extract-content"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239538 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17d04184-b555-437c-b6bb-91a4e13263d8" containerName="registry-server"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239547 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="17d04184-b555-437c-b6bb-91a4e13263d8" containerName="registry-server"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239586 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17d04184-b555-437c-b6bb-91a4e13263d8" containerName="extract-content"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239596 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="17d04184-b555-437c-b6bb-91a4e13263d8" containerName="extract-content"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239607 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31f9f4e1-d825-4a21-883a-c670744048a7" containerName="registry-server"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239615 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="31f9f4e1-d825-4a21-883a-c670744048a7" containerName="registry-server"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239629 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de56a156-ef96-4b18-bd73-d72965f7de18" containerName="registry-server"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239639 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="de56a156-ef96-4b18-bd73-d72965f7de18" containerName="registry-server"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239674 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e88d2996-c98f-4cce-b986-21611b1de03a" containerName="extract-content"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239683 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e88d2996-c98f-4cce-b986-21611b1de03a" containerName="extract-content"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239695 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31f9f4e1-d825-4a21-883a-c670744048a7" containerName="extract-content"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239703 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="31f9f4e1-d825-4a21-883a-c670744048a7" containerName="extract-content"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239714 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de56a156-ef96-4b18-bd73-d72965f7de18" containerName="extract-utilities"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239722 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="de56a156-ef96-4b18-bd73-d72965f7de18" containerName="extract-utilities"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239758 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e88d2996-c98f-4cce-b986-21611b1de03a" containerName="registry-server"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239768 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e88d2996-c98f-4cce-b986-21611b1de03a" containerName="registry-server"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239781 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31f9f4e1-d825-4a21-883a-c670744048a7" containerName="extract-utilities"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239789 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="31f9f4e1-d825-4a21-883a-c670744048a7" containerName="extract-utilities"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239801 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17d04184-b555-437c-b6bb-91a4e13263d8" containerName="extract-utilities"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239845 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="17d04184-b555-437c-b6bb-91a4e13263d8" containerName="extract-utilities"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239858 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a6efb0-34a1-4419-a097-14877cb1371c" containerName="oauth-openshift"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239866 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a6efb0-34a1-4419-a097-14877cb1371c" containerName="oauth-openshift"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239878 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a08a011-72cb-473d-b8c8-3357b34e680f" containerName="pruner"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239886 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a08a011-72cb-473d-b8c8-3357b34e680f" containerName="pruner"
Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.239895 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e88d2996-c98f-4cce-b986-21611b1de03a" containerName="extract-utilities"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.239929 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e88d2996-c98f-4cce-b986-21611b1de03a" containerName="extract-utilities"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.240405 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="17d04184-b555-437c-b6bb-91a4e13263d8" containerName="registry-server"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.240459 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="e88d2996-c98f-4cce-b986-21611b1de03a" containerName="registry-server"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.240475 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="31f9f4e1-d825-4a21-883a-c670744048a7" containerName="registry-server"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.240547 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="de56a156-ef96-4b18-bd73-d72965f7de18" containerName="registry-server"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.240561 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a6efb0-34a1-4419-a097-14877cb1371c" containerName="oauth-openshift"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.240573 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a08a011-72cb-473d-b8c8-3357b34e680f" containerName="pruner"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.241406 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.256492 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"]
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.341636 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-idp-0-file-data\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.341738 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-login\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.341766 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e9a6efb0-34a1-4419-a097-14877cb1371c-audit-dir\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.341791 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-router-certs\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.341834 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-provider-selection\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.341886 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvz9k\" (UniqueName: \"kubernetes.io/projected/e9a6efb0-34a1-4419-a097-14877cb1371c-kube-api-access-wvz9k\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.341930 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-session\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.341956 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-cliconfig\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.342017 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a6efb0-34a1-4419-a097-14877cb1371c-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.342176 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-ocp-branding-template\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.342234 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-serving-cert\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.343400 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-service-ca\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.343435 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.343514 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-audit-policies\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.343590 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-trusted-ca-bundle\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.343628 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-error\") pod \"e9a6efb0-34a1-4419-a097-14877cb1371c\" (UID: \"e9a6efb0-34a1-4419-a097-14877cb1371c\") "
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.343991 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-router-certs\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344157 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344228 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-serving-cert\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344263 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c0f883fe-3ee6-4be3-970d-1b71dbceec27-audit-policies\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344352 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-cliconfig\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344412 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344453 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-session\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344491 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-user-template-error\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344523 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c0f883fe-3ee6-4be3-970d-1b71dbceec27-audit-dir\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344563 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82z54\" (UniqueName: \"kubernetes.io/projected/c0f883fe-3ee6-4be3-970d-1b71dbceec27-kube-api-access-82z54\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344606 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344668 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-user-template-login\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"
Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344711 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-service-ca\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") "
pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344750 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344871 4693 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e9a6efb0-34a1-4419-a097-14877cb1371c-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344880 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.344898 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.345700 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.346518 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.350605 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.351019 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.351323 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.351508 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.351735 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.352196 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.352711 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9a6efb0-34a1-4419-a097-14877cb1371c-kube-api-access-wvz9k" (OuterVolumeSpecName: "kube-api-access-wvz9k") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "kube-api-access-wvz9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.353004 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.356277 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "e9a6efb0-34a1-4419-a097-14877cb1371c" (UID: "e9a6efb0-34a1-4419-a097-14877cb1371c"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446013 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-cliconfig\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446412 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446458 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-session\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446496 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-user-template-error\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446544 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c0f883fe-3ee6-4be3-970d-1b71dbceec27-audit-dir\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446567 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82z54\" (UniqueName: \"kubernetes.io/projected/c0f883fe-3ee6-4be3-970d-1b71dbceec27-kube-api-access-82z54\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446617 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446652 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-user-template-login\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " 
pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446684 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c0f883fe-3ee6-4be3-970d-1b71dbceec27-audit-dir\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446703 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-service-ca\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446729 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446787 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-router-certs\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446876 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446933 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-serving-cert\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.446970 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c0f883fe-3ee6-4be3-970d-1b71dbceec27-audit-policies\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.447714 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-cliconfig\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 
crc kubenswrapper[4693]: I1008 07:20:55.448015 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-service-ca\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448042 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448171 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448193 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448215 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448240 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvz9k\" (UniqueName: \"kubernetes.io/projected/e9a6efb0-34a1-4419-a097-14877cb1371c-kube-api-access-wvz9k\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448260 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448277 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448297 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448313 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448332 4693 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448350 4693 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448369 4693 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e9a6efb0-34a1-4419-a097-14877cb1371c-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.448937 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.449181 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c0f883fe-3ee6-4be3-970d-1b71dbceec27-audit-policies\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.451311 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-user-template-error\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.451970 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.452123 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.452499 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-serving-cert\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.453560 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-user-template-login\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 
07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.453787 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.454267 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-session\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.454994 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c0f883fe-3ee6-4be3-970d-1b71dbceec27-v4-0-config-system-router-certs\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.473190 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82z54\" (UniqueName: \"kubernetes.io/projected/c0f883fe-3ee6-4be3-970d-1b71dbceec27-kube-api-access-82z54\") pod \"oauth-openshift-848ffdc94b-vnpqg\" (UID: \"c0f883fe-3ee6-4be3-970d-1b71dbceec27\") " pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.556216 4693 generic.go:334] "Generic (PLEG): container finished" podID="e9a6efb0-34a1-4419-a097-14877cb1371c" containerID="024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f" exitCode=0 Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.556284 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" event={"ID":"e9a6efb0-34a1-4419-a097-14877cb1371c","Type":"ContainerDied","Data":"024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f"} Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.556294 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.556321 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-rjk9l" event={"ID":"e9a6efb0-34a1-4419-a097-14877cb1371c","Type":"ContainerDied","Data":"001cd2a75b34fd678b07d97b8a06f4f5a108cfd1171b7de7300881d051708c09"} Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.556351 4693 scope.go:117] "RemoveContainer" containerID="024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.578348 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rjk9l"] Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.582986 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.588715 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rjk9l"] Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.595771 4693 scope.go:117] "RemoveContainer" containerID="024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f" Oct 08 07:20:55 crc kubenswrapper[4693]: E1008 07:20:55.597459 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f\": container with ID starting with 024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f not found: ID does not exist" containerID="024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.597509 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f"} err="failed to get container status \"024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f\": rpc error: code = NotFound desc = could not find container \"024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f\": container with ID starting with 024bf21f16485252b88dd8e0617efd1650c93bcc2fa93df769c4ffab6e042b8f not found: ID does not exist" Oct 08 07:20:55 crc kubenswrapper[4693]: I1008 07:20:55.838692 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-848ffdc94b-vnpqg"] Oct 08 07:20:56 crc kubenswrapper[4693]: I1008 07:20:56.568262 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" event={"ID":"c0f883fe-3ee6-4be3-970d-1b71dbceec27","Type":"ContainerStarted","Data":"d725463117e127391fdc44db85ece772975ec2eb25c646b6dd09c48d4ab2229b"} Oct 08 07:20:56 crc kubenswrapper[4693]: I1008 07:20:56.568666 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" event={"ID":"c0f883fe-3ee6-4be3-970d-1b71dbceec27","Type":"ContainerStarted","Data":"b911bf8e48ecb837dcc71c7f51475111915773028523c4d36afba015f7e458d0"} Oct 08 07:20:56 crc kubenswrapper[4693]: I1008 07:20:56.568949 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:56 crc kubenswrapper[4693]: I1008 07:20:56.575050 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" Oct 08 07:20:56 crc kubenswrapper[4693]: I1008 07:20:56.596650 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-848ffdc94b-vnpqg" podStartSLOduration=27.596627073 podStartE2EDuration="27.596627073s" podCreationTimestamp="2025-10-08 07:20:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:20:56.591572669 +0000 UTC m=+241.962537614" watchObservedRunningTime="2025-10-08 07:20:56.596627073 +0000 UTC m=+241.967592038" Oct 08 07:20:57 crc kubenswrapper[4693]: I1008 07:20:57.373937 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9a6efb0-34a1-4419-a097-14877cb1371c" 
path="/var/lib/kubelet/pods/e9a6efb0-34a1-4419-a097-14877cb1371c/volumes" Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.599276 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7x6qs"] Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.605084 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dd458"] Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.605443 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dd458" podUID="ca170ba2-9128-4ba1-9084-c2373299a0d9" containerName="registry-server" containerID="cri-o://c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064" gracePeriod=30 Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.629569 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-97z2l"] Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.630395 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" podUID="a9d1eea1-3570-4937-a89b-5c6d87551b30" containerName="marketplace-operator" containerID="cri-o://183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163" gracePeriod=30 Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.647413 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rxdhl"] Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.651068 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2q242"] Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.651453 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2q242" podUID="3163591b-81ce-4897-8170-961affb60344" containerName="registry-server" containerID="cri-o://6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979" gracePeriod=30 Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.653892 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nkdmr"] Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.654831 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.682439 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7x6qs" podUID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" containerName="registry-server" containerID="cri-o://8b3f1505abd1358fec56c095d9269dc35dd62ea06fb720bb637ac660c29ae637" gracePeriod=30 Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.682704 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rxdhl" podUID="b509fff2-7ab7-4e9a-9057-5fda5fa66f41" containerName="registry-server" containerID="cri-o://841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5" gracePeriod=30 Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.683865 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nkdmr"] Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.745745 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c82bb62-a293-463f-ba14-c6fcf26e3a90-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nkdmr\" (UID: \"9c82bb62-a293-463f-ba14-c6fcf26e3a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.745856 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9c82bb62-a293-463f-ba14-c6fcf26e3a90-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nkdmr\" (UID: \"9c82bb62-a293-463f-ba14-c6fcf26e3a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.745941 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-585sf\" (UniqueName: \"kubernetes.io/projected/9c82bb62-a293-463f-ba14-c6fcf26e3a90-kube-api-access-585sf\") pod \"marketplace-operator-79b997595-nkdmr\" (UID: \"9c82bb62-a293-463f-ba14-c6fcf26e3a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.848300 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-585sf\" (UniqueName: \"kubernetes.io/projected/9c82bb62-a293-463f-ba14-c6fcf26e3a90-kube-api-access-585sf\") pod \"marketplace-operator-79b997595-nkdmr\" (UID: \"9c82bb62-a293-463f-ba14-c6fcf26e3a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.850806 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c82bb62-a293-463f-ba14-c6fcf26e3a90-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nkdmr\" (UID: \"9c82bb62-a293-463f-ba14-c6fcf26e3a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.852335 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c82bb62-a293-463f-ba14-c6fcf26e3a90-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nkdmr\" 
(UID: \"9c82bb62-a293-463f-ba14-c6fcf26e3a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.852546 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9c82bb62-a293-463f-ba14-c6fcf26e3a90-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nkdmr\" (UID: \"9c82bb62-a293-463f-ba14-c6fcf26e3a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.860692 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9c82bb62-a293-463f-ba14-c6fcf26e3a90-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nkdmr\" (UID: \"9c82bb62-a293-463f-ba14-c6fcf26e3a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:08 crc kubenswrapper[4693]: I1008 07:21:08.865548 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-585sf\" (UniqueName: \"kubernetes.io/projected/9c82bb62-a293-463f-ba14-c6fcf26e3a90-kube-api-access-585sf\") pod \"marketplace-operator-79b997595-nkdmr\" (UID: \"9c82bb62-a293-463f-ba14-c6fcf26e3a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.028199 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.092880 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dd458" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.154203 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.216798 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rxdhl" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.261128 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-catalog-content\") pod \"3163591b-81ce-4897-8170-961affb60344\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.261193 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lg99b\" (UniqueName: \"kubernetes.io/projected/ca170ba2-9128-4ba1-9084-c2373299a0d9-kube-api-access-lg99b\") pod \"ca170ba2-9128-4ba1-9084-c2373299a0d9\" (UID: \"ca170ba2-9128-4ba1-9084-c2373299a0d9\") " Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.261230 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca170ba2-9128-4ba1-9084-c2373299a0d9-utilities\") pod \"ca170ba2-9128-4ba1-9084-c2373299a0d9\" (UID: \"ca170ba2-9128-4ba1-9084-c2373299a0d9\") " Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.261247 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhbdv\" (UniqueName: \"kubernetes.io/projected/3163591b-81ce-4897-8170-961affb60344-kube-api-access-xhbdv\") pod \"3163591b-81ce-4897-8170-961affb60344\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.261284 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca170ba2-9128-4ba1-9084-c2373299a0d9-catalog-content\") pod \"ca170ba2-9128-4ba1-9084-c2373299a0d9\" (UID: \"ca170ba2-9128-4ba1-9084-c2373299a0d9\") " Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.261325 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-utilities\") pod \"3163591b-81ce-4897-8170-961affb60344\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.262710 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-utilities" (OuterVolumeSpecName: "utilities") pod "3163591b-81ce-4897-8170-961affb60344" (UID: "3163591b-81ce-4897-8170-961affb60344"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.262967 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca170ba2-9128-4ba1-9084-c2373299a0d9-utilities" (OuterVolumeSpecName: "utilities") pod "ca170ba2-9128-4ba1-9084-c2373299a0d9" (UID: "ca170ba2-9128-4ba1-9084-c2373299a0d9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.267776 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca170ba2-9128-4ba1-9084-c2373299a0d9-kube-api-access-lg99b" (OuterVolumeSpecName: "kube-api-access-lg99b") pod "ca170ba2-9128-4ba1-9084-c2373299a0d9" (UID: "ca170ba2-9128-4ba1-9084-c2373299a0d9"). InnerVolumeSpecName "kube-api-access-lg99b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.268170 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3163591b-81ce-4897-8170-961affb60344-kube-api-access-xhbdv" (OuterVolumeSpecName: "kube-api-access-xhbdv") pod "3163591b-81ce-4897-8170-961affb60344" (UID: "3163591b-81ce-4897-8170-961affb60344"). InnerVolumeSpecName "kube-api-access-xhbdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: W1008 07:21:09.313619 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c82bb62_a293_463f_ba14_c6fcf26e3a90.slice/crio-397f9059de46459b3acb0d1c6e4b4183812da7eef7f413ba7e49a7ed5ff3f772 WatchSource:0}: Error finding container 397f9059de46459b3acb0d1c6e4b4183812da7eef7f413ba7e49a7ed5ff3f772: Status 404 returned error can't find the container with id 397f9059de46459b3acb0d1c6e4b4183812da7eef7f413ba7e49a7ed5ff3f772 Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.314501 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nkdmr"] Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.344425 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca170ba2-9128-4ba1-9084-c2373299a0d9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ca170ba2-9128-4ba1-9084-c2373299a0d9" (UID: "ca170ba2-9128-4ba1-9084-c2373299a0d9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.362633 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3163591b-81ce-4897-8170-961affb60344" (UID: "3163591b-81ce-4897-8170-961affb60344"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.363879 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-catalog-content\") pod \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\" (UID: \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\") " Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.364029 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-utilities\") pod \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\" (UID: \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\") " Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.364162 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-catalog-content\") pod \"3163591b-81ce-4897-8170-961affb60344\" (UID: \"3163591b-81ce-4897-8170-961affb60344\") " Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.364209 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x985n\" (UniqueName: \"kubernetes.io/projected/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-kube-api-access-x985n\") pod \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\" (UID: \"b509fff2-7ab7-4e9a-9057-5fda5fa66f41\") " Oct 08 07:21:09 crc kubenswrapper[4693]: W1008 07:21:09.364236 4693 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/3163591b-81ce-4897-8170-961affb60344/volumes/kubernetes.io~empty-dir/catalog-content Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.364264 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3163591b-81ce-4897-8170-961affb60344" (UID: "3163591b-81ce-4897-8170-961affb60344"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.364450 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.364471 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lg99b\" (UniqueName: \"kubernetes.io/projected/ca170ba2-9128-4ba1-9084-c2373299a0d9-kube-api-access-lg99b\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.364487 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca170ba2-9128-4ba1-9084-c2373299a0d9-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.364498 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhbdv\" (UniqueName: \"kubernetes.io/projected/3163591b-81ce-4897-8170-961affb60344-kube-api-access-xhbdv\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.364507 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca170ba2-9128-4ba1-9084-c2373299a0d9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.364518 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3163591b-81ce-4897-8170-961affb60344-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.365180 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-utilities" (OuterVolumeSpecName: "utilities") pod "b509fff2-7ab7-4e9a-9057-5fda5fa66f41" (UID: "b509fff2-7ab7-4e9a-9057-5fda5fa66f41"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.366989 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-kube-api-access-x985n" (OuterVolumeSpecName: "kube-api-access-x985n") pod "b509fff2-7ab7-4e9a-9057-5fda5fa66f41" (UID: "b509fff2-7ab7-4e9a-9057-5fda5fa66f41"). InnerVolumeSpecName "kube-api-access-x985n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.376304 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b509fff2-7ab7-4e9a-9057-5fda5fa66f41" (UID: "b509fff2-7ab7-4e9a-9057-5fda5fa66f41"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.465924 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x985n\" (UniqueName: \"kubernetes.io/projected/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-kube-api-access-x985n\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.465961 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.466104 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b509fff2-7ab7-4e9a-9057-5fda5fa66f41-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.574137 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.669362 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rc5st\" (UniqueName: \"kubernetes.io/projected/a9d1eea1-3570-4937-a89b-5c6d87551b30-kube-api-access-rc5st\") pod \"a9d1eea1-3570-4937-a89b-5c6d87551b30\" (UID: \"a9d1eea1-3570-4937-a89b-5c6d87551b30\") " Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.669479 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a9d1eea1-3570-4937-a89b-5c6d87551b30-marketplace-trusted-ca\") pod \"a9d1eea1-3570-4937-a89b-5c6d87551b30\" (UID: \"a9d1eea1-3570-4937-a89b-5c6d87551b30\") " Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.669547 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a9d1eea1-3570-4937-a89b-5c6d87551b30-marketplace-operator-metrics\") pod \"a9d1eea1-3570-4937-a89b-5c6d87551b30\" (UID: \"a9d1eea1-3570-4937-a89b-5c6d87551b30\") " Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.670382 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9d1eea1-3570-4937-a89b-5c6d87551b30-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "a9d1eea1-3570-4937-a89b-5c6d87551b30" (UID: "a9d1eea1-3570-4937-a89b-5c6d87551b30"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.674408 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9d1eea1-3570-4937-a89b-5c6d87551b30-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "a9d1eea1-3570-4937-a89b-5c6d87551b30" (UID: "a9d1eea1-3570-4937-a89b-5c6d87551b30"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.674647 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9d1eea1-3570-4937-a89b-5c6d87551b30-kube-api-access-rc5st" (OuterVolumeSpecName: "kube-api-access-rc5st") pod "a9d1eea1-3570-4937-a89b-5c6d87551b30" (UID: "a9d1eea1-3570-4937-a89b-5c6d87551b30"). InnerVolumeSpecName "kube-api-access-rc5st". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.691028 4693 generic.go:334] "Generic (PLEG): container finished" podID="ca170ba2-9128-4ba1-9084-c2373299a0d9" containerID="c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064" exitCode=0 Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.691124 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dd458" event={"ID":"ca170ba2-9128-4ba1-9084-c2373299a0d9","Type":"ContainerDied","Data":"c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064"} Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.691151 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dd458" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.691196 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dd458" event={"ID":"ca170ba2-9128-4ba1-9084-c2373299a0d9","Type":"ContainerDied","Data":"e474d29fbde1da5a97f6f25309a7158665a844e18b175f40504bb6ca64d2f478"} Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.691219 4693 scope.go:117] "RemoveContainer" containerID="c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.694199 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" event={"ID":"9c82bb62-a293-463f-ba14-c6fcf26e3a90","Type":"ContainerStarted","Data":"3c77f5a501191f8a1b1e10c103aa1e3efd073eeb5e1ce9f1c1cf46c429d7a21a"} Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.694229 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" event={"ID":"9c82bb62-a293-463f-ba14-c6fcf26e3a90","Type":"ContainerStarted","Data":"397f9059de46459b3acb0d1c6e4b4183812da7eef7f413ba7e49a7ed5ff3f772"} Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.694947 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.697872 4693 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nkdmr container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" start-of-body= Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.697949 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" podUID="9c82bb62-a293-463f-ba14-c6fcf26e3a90" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.700458 4693 generic.go:334] "Generic (PLEG): container finished" podID="b509fff2-7ab7-4e9a-9057-5fda5fa66f41" containerID="841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5" exitCode=0 Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.700555 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxdhl" event={"ID":"b509fff2-7ab7-4e9a-9057-5fda5fa66f41","Type":"ContainerDied","Data":"841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5"} Oct 08 07:21:09 crc 
kubenswrapper[4693]: I1008 07:21:09.700597 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxdhl" event={"ID":"b509fff2-7ab7-4e9a-9057-5fda5fa66f41","Type":"ContainerDied","Data":"7693c3b5f06ee372798c127c98d6624e4c3462981a3a2303d466b486d9b74c04"} Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.700705 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rxdhl" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.703312 4693 generic.go:334] "Generic (PLEG): container finished" podID="3163591b-81ce-4897-8170-961affb60344" containerID="6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979" exitCode=0 Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.703381 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q242" event={"ID":"3163591b-81ce-4897-8170-961affb60344","Type":"ContainerDied","Data":"6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979"} Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.703414 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q242" event={"ID":"3163591b-81ce-4897-8170-961affb60344","Type":"ContainerDied","Data":"28d2af4bf8471e3bb55410dfdf16272e4251ef8d8755118552a776ea9826af83"} Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.703495 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2q242" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.707110 4693 generic.go:334] "Generic (PLEG): container finished" podID="a9d1eea1-3570-4937-a89b-5c6d87551b30" containerID="183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163" exitCode=0 Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.707250 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.707360 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" event={"ID":"a9d1eea1-3570-4937-a89b-5c6d87551b30","Type":"ContainerDied","Data":"183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163"} Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.707405 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-97z2l" event={"ID":"a9d1eea1-3570-4937-a89b-5c6d87551b30","Type":"ContainerDied","Data":"887f00b338307c85ed0512b5ba6302bbefc183495b2dec8ad0b6f9e32c872783"} Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.710138 4693 scope.go:117] "RemoveContainer" containerID="805d5cfb4b68a7ba51c51241c1f8bf4bea490a4e150f5242cb13feaa79b4ce0e" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.715788 4693 generic.go:334] "Generic (PLEG): container finished" podID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" containerID="8b3f1505abd1358fec56c095d9269dc35dd62ea06fb720bb637ac660c29ae637" exitCode=0 Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.715843 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7x6qs" event={"ID":"b4f7b0f9-4902-4768-94ae-0c98e7814f9f","Type":"ContainerDied","Data":"8b3f1505abd1358fec56c095d9269dc35dd62ea06fb720bb637ac660c29ae637"} Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.716243 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dd458"] Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.720318 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dd458"] Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.731271 4693 scope.go:117] "RemoveContainer" containerID="f81ecfd9e0d88fdbf50b98fa8e52b4ea9b5f819701f34d24173d11f2a5852831" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.763395 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" podStartSLOduration=1.763370404 podStartE2EDuration="1.763370404s" podCreationTimestamp="2025-10-08 07:21:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:21:09.740038609 +0000 UTC m=+255.111003544" watchObservedRunningTime="2025-10-08 07:21:09.763370404 +0000 UTC m=+255.134335339" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.765953 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2q242"] Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.770639 4693 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a9d1eea1-3570-4937-a89b-5c6d87551b30-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.770673 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rc5st\" (UniqueName: \"kubernetes.io/projected/a9d1eea1-3570-4937-a89b-5c6d87551b30-kube-api-access-rc5st\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.770687 4693 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/a9d1eea1-3570-4937-a89b-5c6d87551b30-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.771625 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2q242"] Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.780854 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rxdhl"] Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.784488 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rxdhl"] Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.787058 4693 scope.go:117] "RemoveContainer" containerID="c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064" Oct 08 07:21:09 crc kubenswrapper[4693]: E1008 07:21:09.787773 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064\": container with ID starting with c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064 not found: ID does not exist" containerID="c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.787836 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064"} err="failed to get container status \"c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064\": rpc error: code = NotFound desc = could not find container \"c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064\": container with ID starting with c707c4ec02bbf1f947697bb094a864b96db3178c6ff4de34ac11b172e9ead064 not found: ID does not exist" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.787870 4693 scope.go:117] "RemoveContainer" containerID="805d5cfb4b68a7ba51c51241c1f8bf4bea490a4e150f5242cb13feaa79b4ce0e" Oct 08 07:21:09 crc kubenswrapper[4693]: E1008 07:21:09.791196 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"805d5cfb4b68a7ba51c51241c1f8bf4bea490a4e150f5242cb13feaa79b4ce0e\": container with ID starting with 805d5cfb4b68a7ba51c51241c1f8bf4bea490a4e150f5242cb13feaa79b4ce0e not found: ID does not exist" containerID="805d5cfb4b68a7ba51c51241c1f8bf4bea490a4e150f5242cb13feaa79b4ce0e" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.791230 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"805d5cfb4b68a7ba51c51241c1f8bf4bea490a4e150f5242cb13feaa79b4ce0e"} err="failed to get container status \"805d5cfb4b68a7ba51c51241c1f8bf4bea490a4e150f5242cb13feaa79b4ce0e\": rpc error: code = NotFound desc = could not find container \"805d5cfb4b68a7ba51c51241c1f8bf4bea490a4e150f5242cb13feaa79b4ce0e\": container with ID starting with 805d5cfb4b68a7ba51c51241c1f8bf4bea490a4e150f5242cb13feaa79b4ce0e not found: ID does not exist" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.791249 4693 scope.go:117] "RemoveContainer" containerID="f81ecfd9e0d88fdbf50b98fa8e52b4ea9b5f819701f34d24173d11f2a5852831" Oct 08 07:21:09 crc kubenswrapper[4693]: E1008 07:21:09.791531 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f81ecfd9e0d88fdbf50b98fa8e52b4ea9b5f819701f34d24173d11f2a5852831\": 
container with ID starting with f81ecfd9e0d88fdbf50b98fa8e52b4ea9b5f819701f34d24173d11f2a5852831 not found: ID does not exist" containerID="f81ecfd9e0d88fdbf50b98fa8e52b4ea9b5f819701f34d24173d11f2a5852831" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.791552 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f81ecfd9e0d88fdbf50b98fa8e52b4ea9b5f819701f34d24173d11f2a5852831"} err="failed to get container status \"f81ecfd9e0d88fdbf50b98fa8e52b4ea9b5f819701f34d24173d11f2a5852831\": rpc error: code = NotFound desc = could not find container \"f81ecfd9e0d88fdbf50b98fa8e52b4ea9b5f819701f34d24173d11f2a5852831\": container with ID starting with f81ecfd9e0d88fdbf50b98fa8e52b4ea9b5f819701f34d24173d11f2a5852831 not found: ID does not exist" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.791566 4693 scope.go:117] "RemoveContainer" containerID="841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.797505 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-97z2l"] Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.803155 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-97z2l"] Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.811529 4693 scope.go:117] "RemoveContainer" containerID="3deb2256b9154bb0a9d61ff34d0b6b5164caf480a45cdfc81ecaf117a17077d2" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.828355 4693 scope.go:117] "RemoveContainer" containerID="e88f77d651a1a7466a0377f46cd318ff2b39a605f3cf6168453561f3c3e35af3" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.849206 4693 scope.go:117] "RemoveContainer" containerID="841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5" Oct 08 07:21:09 crc kubenswrapper[4693]: E1008 07:21:09.850018 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5\": container with ID starting with 841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5 not found: ID does not exist" containerID="841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.850091 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5"} err="failed to get container status \"841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5\": rpc error: code = NotFound desc = could not find container \"841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5\": container with ID starting with 841b757bf7c5b22f42d225e22dbb49d5b002813115e8647a5249a52a93c9b7d5 not found: ID does not exist" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.850131 4693 scope.go:117] "RemoveContainer" containerID="3deb2256b9154bb0a9d61ff34d0b6b5164caf480a45cdfc81ecaf117a17077d2" Oct 08 07:21:09 crc kubenswrapper[4693]: E1008 07:21:09.854100 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3deb2256b9154bb0a9d61ff34d0b6b5164caf480a45cdfc81ecaf117a17077d2\": container with ID starting with 3deb2256b9154bb0a9d61ff34d0b6b5164caf480a45cdfc81ecaf117a17077d2 not found: ID does not exist" 
containerID="3deb2256b9154bb0a9d61ff34d0b6b5164caf480a45cdfc81ecaf117a17077d2" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.854170 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3deb2256b9154bb0a9d61ff34d0b6b5164caf480a45cdfc81ecaf117a17077d2"} err="failed to get container status \"3deb2256b9154bb0a9d61ff34d0b6b5164caf480a45cdfc81ecaf117a17077d2\": rpc error: code = NotFound desc = could not find container \"3deb2256b9154bb0a9d61ff34d0b6b5164caf480a45cdfc81ecaf117a17077d2\": container with ID starting with 3deb2256b9154bb0a9d61ff34d0b6b5164caf480a45cdfc81ecaf117a17077d2 not found: ID does not exist" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.854221 4693 scope.go:117] "RemoveContainer" containerID="e88f77d651a1a7466a0377f46cd318ff2b39a605f3cf6168453561f3c3e35af3" Oct 08 07:21:09 crc kubenswrapper[4693]: E1008 07:21:09.855434 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e88f77d651a1a7466a0377f46cd318ff2b39a605f3cf6168453561f3c3e35af3\": container with ID starting with e88f77d651a1a7466a0377f46cd318ff2b39a605f3cf6168453561f3c3e35af3 not found: ID does not exist" containerID="e88f77d651a1a7466a0377f46cd318ff2b39a605f3cf6168453561f3c3e35af3" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.855471 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e88f77d651a1a7466a0377f46cd318ff2b39a605f3cf6168453561f3c3e35af3"} err="failed to get container status \"e88f77d651a1a7466a0377f46cd318ff2b39a605f3cf6168453561f3c3e35af3\": rpc error: code = NotFound desc = could not find container \"e88f77d651a1a7466a0377f46cd318ff2b39a605f3cf6168453561f3c3e35af3\": container with ID starting with e88f77d651a1a7466a0377f46cd318ff2b39a605f3cf6168453561f3c3e35af3 not found: ID does not exist" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.855492 4693 scope.go:117] "RemoveContainer" containerID="6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.870989 4693 scope.go:117] "RemoveContainer" containerID="f00c6d25251d1554db37f46dcb571f8d7728d9c6afb92784b8c1e756828d8f72" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.900753 4693 scope.go:117] "RemoveContainer" containerID="ef9f6f50ddd8475cf82837e128df3fd52a8bb5cc96cb66272dc4e9793657329b" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.918058 4693 scope.go:117] "RemoveContainer" containerID="6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979" Oct 08 07:21:09 crc kubenswrapper[4693]: E1008 07:21:09.918542 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979\": container with ID starting with 6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979 not found: ID does not exist" containerID="6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.918576 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979"} err="failed to get container status \"6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979\": rpc error: code = NotFound desc = could not find container 
\"6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979\": container with ID starting with 6ab1cf2980b4307c554ae0a233b4cbfd66b8eeccfc08ce6b9c266dfb15ce4979 not found: ID does not exist" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.918605 4693 scope.go:117] "RemoveContainer" containerID="f00c6d25251d1554db37f46dcb571f8d7728d9c6afb92784b8c1e756828d8f72" Oct 08 07:21:09 crc kubenswrapper[4693]: E1008 07:21:09.918899 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f00c6d25251d1554db37f46dcb571f8d7728d9c6afb92784b8c1e756828d8f72\": container with ID starting with f00c6d25251d1554db37f46dcb571f8d7728d9c6afb92784b8c1e756828d8f72 not found: ID does not exist" containerID="f00c6d25251d1554db37f46dcb571f8d7728d9c6afb92784b8c1e756828d8f72" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.918926 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f00c6d25251d1554db37f46dcb571f8d7728d9c6afb92784b8c1e756828d8f72"} err="failed to get container status \"f00c6d25251d1554db37f46dcb571f8d7728d9c6afb92784b8c1e756828d8f72\": rpc error: code = NotFound desc = could not find container \"f00c6d25251d1554db37f46dcb571f8d7728d9c6afb92784b8c1e756828d8f72\": container with ID starting with f00c6d25251d1554db37f46dcb571f8d7728d9c6afb92784b8c1e756828d8f72 not found: ID does not exist" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.918945 4693 scope.go:117] "RemoveContainer" containerID="ef9f6f50ddd8475cf82837e128df3fd52a8bb5cc96cb66272dc4e9793657329b" Oct 08 07:21:09 crc kubenswrapper[4693]: E1008 07:21:09.919358 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef9f6f50ddd8475cf82837e128df3fd52a8bb5cc96cb66272dc4e9793657329b\": container with ID starting with ef9f6f50ddd8475cf82837e128df3fd52a8bb5cc96cb66272dc4e9793657329b not found: ID does not exist" containerID="ef9f6f50ddd8475cf82837e128df3fd52a8bb5cc96cb66272dc4e9793657329b" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.919379 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef9f6f50ddd8475cf82837e128df3fd52a8bb5cc96cb66272dc4e9793657329b"} err="failed to get container status \"ef9f6f50ddd8475cf82837e128df3fd52a8bb5cc96cb66272dc4e9793657329b\": rpc error: code = NotFound desc = could not find container \"ef9f6f50ddd8475cf82837e128df3fd52a8bb5cc96cb66272dc4e9793657329b\": container with ID starting with ef9f6f50ddd8475cf82837e128df3fd52a8bb5cc96cb66272dc4e9793657329b not found: ID does not exist" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.919393 4693 scope.go:117] "RemoveContainer" containerID="183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 07:21:09.975224 4693 scope.go:117] "RemoveContainer" containerID="183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163" Oct 08 07:21:09 crc kubenswrapper[4693]: E1008 07:21:09.975980 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163\": container with ID starting with 183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163 not found: ID does not exist" containerID="183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163" Oct 08 07:21:09 crc kubenswrapper[4693]: I1008 
07:21:09.976046 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163"} err="failed to get container status \"183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163\": rpc error: code = NotFound desc = could not find container \"183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163\": container with ID starting with 183ed83e4617e87f634107d80c50c6ae890b6042d71a02db36f5aa999f730163 not found: ID does not exist" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.122910 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7x6qs" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.277546 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-catalog-content\") pod \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\" (UID: \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\") " Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.277690 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwr64\" (UniqueName: \"kubernetes.io/projected/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-kube-api-access-jwr64\") pod \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\" (UID: \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\") " Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.277758 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-utilities\") pod \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\" (UID: \"b4f7b0f9-4902-4768-94ae-0c98e7814f9f\") " Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.278760 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-utilities" (OuterVolumeSpecName: "utilities") pod "b4f7b0f9-4902-4768-94ae-0c98e7814f9f" (UID: "b4f7b0f9-4902-4768-94ae-0c98e7814f9f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.284903 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-kube-api-access-jwr64" (OuterVolumeSpecName: "kube-api-access-jwr64") pod "b4f7b0f9-4902-4768-94ae-0c98e7814f9f" (UID: "b4f7b0f9-4902-4768-94ae-0c98e7814f9f"). InnerVolumeSpecName "kube-api-access-jwr64". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.331041 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4f7b0f9-4902-4768-94ae-0c98e7814f9f" (UID: "b4f7b0f9-4902-4768-94ae-0c98e7814f9f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.379448 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.379773 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwr64\" (UniqueName: \"kubernetes.io/projected/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-kube-api-access-jwr64\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.379976 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4f7b0f9-4902-4768-94ae-0c98e7814f9f-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.725999 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7x6qs" event={"ID":"b4f7b0f9-4902-4768-94ae-0c98e7814f9f","Type":"ContainerDied","Data":"06d3b0407f968505e38d1e9020e3ac0f39e35dd34baa285ccdc38497a8da4b26"} Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.727842 4693 scope.go:117] "RemoveContainer" containerID="8b3f1505abd1358fec56c095d9269dc35dd62ea06fb720bb637ac660c29ae637" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.727863 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7x6qs" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.736205 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-nkdmr" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.765758 4693 scope.go:117] "RemoveContainer" containerID="f49c7d60130e7d4ea68cb91756e5874c619e5d7d2366538be1359eae491f9e34" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.779168 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7x6qs"] Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.788541 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7x6qs"] Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.798044 4693 scope.go:117] "RemoveContainer" containerID="e4ac3330065a975da60f91aa31da25989fcc03bc61e34f33badb519e887d5e4e" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811231 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sz5qh"] Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811439 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9d1eea1-3570-4937-a89b-5c6d87551b30" containerName="marketplace-operator" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811453 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9d1eea1-3570-4937-a89b-5c6d87551b30" containerName="marketplace-operator" Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811464 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b509fff2-7ab7-4e9a-9057-5fda5fa66f41" containerName="extract-content" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811471 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b509fff2-7ab7-4e9a-9057-5fda5fa66f41" containerName="extract-content" Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811481 4693 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="ca170ba2-9128-4ba1-9084-c2373299a0d9" containerName="extract-content" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811491 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca170ba2-9128-4ba1-9084-c2373299a0d9" containerName="extract-content" Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811500 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca170ba2-9128-4ba1-9084-c2373299a0d9" containerName="registry-server" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811508 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca170ba2-9128-4ba1-9084-c2373299a0d9" containerName="registry-server" Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811517 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" containerName="extract-utilities" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811523 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" containerName="extract-utilities" Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811535 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" containerName="extract-content" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811541 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" containerName="extract-content" Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811549 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3163591b-81ce-4897-8170-961affb60344" containerName="extract-content" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811555 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="3163591b-81ce-4897-8170-961affb60344" containerName="extract-content" Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811563 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3163591b-81ce-4897-8170-961affb60344" containerName="extract-utilities" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811570 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="3163591b-81ce-4897-8170-961affb60344" containerName="extract-utilities" Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811579 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b509fff2-7ab7-4e9a-9057-5fda5fa66f41" containerName="extract-utilities" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811585 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b509fff2-7ab7-4e9a-9057-5fda5fa66f41" containerName="extract-utilities" Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811595 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" containerName="registry-server" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811601 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" containerName="registry-server" Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811610 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b509fff2-7ab7-4e9a-9057-5fda5fa66f41" containerName="registry-server" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811616 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b509fff2-7ab7-4e9a-9057-5fda5fa66f41" containerName="registry-server" Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811622 4693 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3163591b-81ce-4897-8170-961affb60344" containerName="registry-server" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811627 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="3163591b-81ce-4897-8170-961affb60344" containerName="registry-server" Oct 08 07:21:10 crc kubenswrapper[4693]: E1008 07:21:10.811636 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca170ba2-9128-4ba1-9084-c2373299a0d9" containerName="extract-utilities" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.811642 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca170ba2-9128-4ba1-9084-c2373299a0d9" containerName="extract-utilities" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.812767 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b509fff2-7ab7-4e9a-9057-5fda5fa66f41" containerName="registry-server" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.812793 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9d1eea1-3570-4937-a89b-5c6d87551b30" containerName="marketplace-operator" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.812826 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="3163591b-81ce-4897-8170-961affb60344" containerName="registry-server" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.812846 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca170ba2-9128-4ba1-9084-c2373299a0d9" containerName="registry-server" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.812981 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" containerName="registry-server" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.837503 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sz5qh"] Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.837686 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sz5qh" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.842321 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.990865 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa717bec-7159-42c4-98b4-65eca2bd583b-catalog-content\") pod \"redhat-marketplace-sz5qh\" (UID: \"fa717bec-7159-42c4-98b4-65eca2bd583b\") " pod="openshift-marketplace/redhat-marketplace-sz5qh" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.991098 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58cpc\" (UniqueName: \"kubernetes.io/projected/fa717bec-7159-42c4-98b4-65eca2bd583b-kube-api-access-58cpc\") pod \"redhat-marketplace-sz5qh\" (UID: \"fa717bec-7159-42c4-98b4-65eca2bd583b\") " pod="openshift-marketplace/redhat-marketplace-sz5qh" Oct 08 07:21:10 crc kubenswrapper[4693]: I1008 07:21:10.991171 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa717bec-7159-42c4-98b4-65eca2bd583b-utilities\") pod \"redhat-marketplace-sz5qh\" (UID: \"fa717bec-7159-42c4-98b4-65eca2bd583b\") " pod="openshift-marketplace/redhat-marketplace-sz5qh" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.013087 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ccgvl"] Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.018459 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ccgvl" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.023996 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.030678 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ccgvl"] Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.092930 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa717bec-7159-42c4-98b4-65eca2bd583b-catalog-content\") pod \"redhat-marketplace-sz5qh\" (UID: \"fa717bec-7159-42c4-98b4-65eca2bd583b\") " pod="openshift-marketplace/redhat-marketplace-sz5qh" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.093076 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58cpc\" (UniqueName: \"kubernetes.io/projected/fa717bec-7159-42c4-98b4-65eca2bd583b-kube-api-access-58cpc\") pod \"redhat-marketplace-sz5qh\" (UID: \"fa717bec-7159-42c4-98b4-65eca2bd583b\") " pod="openshift-marketplace/redhat-marketplace-sz5qh" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.093114 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa717bec-7159-42c4-98b4-65eca2bd583b-utilities\") pod \"redhat-marketplace-sz5qh\" (UID: \"fa717bec-7159-42c4-98b4-65eca2bd583b\") " pod="openshift-marketplace/redhat-marketplace-sz5qh" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.093597 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa717bec-7159-42c4-98b4-65eca2bd583b-catalog-content\") pod \"redhat-marketplace-sz5qh\" (UID: \"fa717bec-7159-42c4-98b4-65eca2bd583b\") " pod="openshift-marketplace/redhat-marketplace-sz5qh" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.093801 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa717bec-7159-42c4-98b4-65eca2bd583b-utilities\") pod \"redhat-marketplace-sz5qh\" (UID: \"fa717bec-7159-42c4-98b4-65eca2bd583b\") " pod="openshift-marketplace/redhat-marketplace-sz5qh" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.122649 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58cpc\" (UniqueName: \"kubernetes.io/projected/fa717bec-7159-42c4-98b4-65eca2bd583b-kube-api-access-58cpc\") pod \"redhat-marketplace-sz5qh\" (UID: \"fa717bec-7159-42c4-98b4-65eca2bd583b\") " pod="openshift-marketplace/redhat-marketplace-sz5qh" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.161806 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sz5qh" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.194736 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51-utilities\") pod \"redhat-operators-ccgvl\" (UID: \"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51\") " pod="openshift-marketplace/redhat-operators-ccgvl" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.194917 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51-catalog-content\") pod \"redhat-operators-ccgvl\" (UID: \"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51\") " pod="openshift-marketplace/redhat-operators-ccgvl" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.195001 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbrmw\" (UniqueName: \"kubernetes.io/projected/a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51-kube-api-access-xbrmw\") pod \"redhat-operators-ccgvl\" (UID: \"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51\") " pod="openshift-marketplace/redhat-operators-ccgvl" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.296015 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51-utilities\") pod \"redhat-operators-ccgvl\" (UID: \"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51\") " pod="openshift-marketplace/redhat-operators-ccgvl" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.296077 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51-catalog-content\") pod \"redhat-operators-ccgvl\" (UID: \"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51\") " pod="openshift-marketplace/redhat-operators-ccgvl" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.296103 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbrmw\" (UniqueName: \"kubernetes.io/projected/a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51-kube-api-access-xbrmw\") pod \"redhat-operators-ccgvl\" (UID: \"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51\") " pod="openshift-marketplace/redhat-operators-ccgvl" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.297216 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51-utilities\") pod \"redhat-operators-ccgvl\" (UID: \"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51\") " pod="openshift-marketplace/redhat-operators-ccgvl" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.297656 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51-catalog-content\") pod \"redhat-operators-ccgvl\" (UID: \"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51\") " pod="openshift-marketplace/redhat-operators-ccgvl" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.321666 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbrmw\" (UniqueName: \"kubernetes.io/projected/a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51-kube-api-access-xbrmw\") pod \"redhat-operators-ccgvl\" (UID: 
\"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51\") " pod="openshift-marketplace/redhat-operators-ccgvl" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.338546 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ccgvl" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.378445 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3163591b-81ce-4897-8170-961affb60344" path="/var/lib/kubelet/pods/3163591b-81ce-4897-8170-961affb60344/volumes" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.379142 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9d1eea1-3570-4937-a89b-5c6d87551b30" path="/var/lib/kubelet/pods/a9d1eea1-3570-4937-a89b-5c6d87551b30/volumes" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.379616 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4f7b0f9-4902-4768-94ae-0c98e7814f9f" path="/var/lib/kubelet/pods/b4f7b0f9-4902-4768-94ae-0c98e7814f9f/volumes" Oct 08 07:21:11 crc kubenswrapper[4693]: W1008 07:21:11.380458 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa717bec_7159_42c4_98b4_65eca2bd583b.slice/crio-01a25e2fb775456cc1d57bcc26773741420ff05d9333f5ba654356126be8279b WatchSource:0}: Error finding container 01a25e2fb775456cc1d57bcc26773741420ff05d9333f5ba654356126be8279b: Status 404 returned error can't find the container with id 01a25e2fb775456cc1d57bcc26773741420ff05d9333f5ba654356126be8279b Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.380691 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b509fff2-7ab7-4e9a-9057-5fda5fa66f41" path="/var/lib/kubelet/pods/b509fff2-7ab7-4e9a-9057-5fda5fa66f41/volumes" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.381310 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca170ba2-9128-4ba1-9084-c2373299a0d9" path="/var/lib/kubelet/pods/ca170ba2-9128-4ba1-9084-c2373299a0d9/volumes" Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.381878 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sz5qh"] Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.567533 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ccgvl"] Oct 08 07:21:11 crc kubenswrapper[4693]: W1008 07:21:11.614835 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda44ec819_82cb_4ac2_8ac5_49dbc0cb8e51.slice/crio-2545eec0f7469c17d1087d9ecb6dd6eb662bd86a1586aecd2530a73c0d66294d WatchSource:0}: Error finding container 2545eec0f7469c17d1087d9ecb6dd6eb662bd86a1586aecd2530a73c0d66294d: Status 404 returned error can't find the container with id 2545eec0f7469c17d1087d9ecb6dd6eb662bd86a1586aecd2530a73c0d66294d Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.742231 4693 generic.go:334] "Generic (PLEG): container finished" podID="fa717bec-7159-42c4-98b4-65eca2bd583b" containerID="244decefc8f7f273e443578fc0ab7bb37608acb0b11cb153411e88ba3154ad85" exitCode=0 Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.742280 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sz5qh" event={"ID":"fa717bec-7159-42c4-98b4-65eca2bd583b","Type":"ContainerDied","Data":"244decefc8f7f273e443578fc0ab7bb37608acb0b11cb153411e88ba3154ad85"} Oct 08 07:21:11 crc 
kubenswrapper[4693]: I1008 07:21:11.742596 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sz5qh" event={"ID":"fa717bec-7159-42c4-98b4-65eca2bd583b","Type":"ContainerStarted","Data":"01a25e2fb775456cc1d57bcc26773741420ff05d9333f5ba654356126be8279b"} Oct 08 07:21:11 crc kubenswrapper[4693]: I1008 07:21:11.747876 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ccgvl" event={"ID":"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51","Type":"ContainerStarted","Data":"2545eec0f7469c17d1087d9ecb6dd6eb662bd86a1586aecd2530a73c0d66294d"} Oct 08 07:21:12 crc kubenswrapper[4693]: I1008 07:21:12.755174 4693 generic.go:334] "Generic (PLEG): container finished" podID="a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51" containerID="78aa2dc66012d0b3a9dcbb82256b925526afa05d7a1f9d9906d8e0b4286c27b0" exitCode=0 Oct 08 07:21:12 crc kubenswrapper[4693]: I1008 07:21:12.755341 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ccgvl" event={"ID":"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51","Type":"ContainerDied","Data":"78aa2dc66012d0b3a9dcbb82256b925526afa05d7a1f9d9906d8e0b4286c27b0"} Oct 08 07:21:12 crc kubenswrapper[4693]: I1008 07:21:12.761594 4693 generic.go:334] "Generic (PLEG): container finished" podID="fa717bec-7159-42c4-98b4-65eca2bd583b" containerID="3b8267142d60e1a646984dad4e4058f2d4cdcdb84e019067bd20b7fc2fd8e1e7" exitCode=0 Oct 08 07:21:12 crc kubenswrapper[4693]: I1008 07:21:12.761653 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sz5qh" event={"ID":"fa717bec-7159-42c4-98b4-65eca2bd583b","Type":"ContainerDied","Data":"3b8267142d60e1a646984dad4e4058f2d4cdcdb84e019067bd20b7fc2fd8e1e7"} Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.221670 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5rw5w"] Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.223992 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.224803 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5rw5w"] Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.227346 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.326498 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6q2t\" (UniqueName: \"kubernetes.io/projected/1fd608a1-42a8-47e8-97f8-fc387766fae0-kube-api-access-f6q2t\") pod \"community-operators-5rw5w\" (UID: \"1fd608a1-42a8-47e8-97f8-fc387766fae0\") " pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.326584 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fd608a1-42a8-47e8-97f8-fc387766fae0-catalog-content\") pod \"community-operators-5rw5w\" (UID: \"1fd608a1-42a8-47e8-97f8-fc387766fae0\") " pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.326629 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fd608a1-42a8-47e8-97f8-fc387766fae0-utilities\") pod \"community-operators-5rw5w\" (UID: \"1fd608a1-42a8-47e8-97f8-fc387766fae0\") " pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.411657 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-n4jbg"] Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.412683 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-n4jbg" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.428162 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-n4jbg"] Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.428415 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6q2t\" (UniqueName: \"kubernetes.io/projected/1fd608a1-42a8-47e8-97f8-fc387766fae0-kube-api-access-f6q2t\") pod \"community-operators-5rw5w\" (UID: \"1fd608a1-42a8-47e8-97f8-fc387766fae0\") " pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.428719 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fd608a1-42a8-47e8-97f8-fc387766fae0-catalog-content\") pod \"community-operators-5rw5w\" (UID: \"1fd608a1-42a8-47e8-97f8-fc387766fae0\") " pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.428968 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fd608a1-42a8-47e8-97f8-fc387766fae0-utilities\") pod \"community-operators-5rw5w\" (UID: \"1fd608a1-42a8-47e8-97f8-fc387766fae0\") " pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.429106 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.429602 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fd608a1-42a8-47e8-97f8-fc387766fae0-utilities\") pod \"community-operators-5rw5w\" (UID: \"1fd608a1-42a8-47e8-97f8-fc387766fae0\") " pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.430017 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fd608a1-42a8-47e8-97f8-fc387766fae0-catalog-content\") pod \"community-operators-5rw5w\" (UID: \"1fd608a1-42a8-47e8-97f8-fc387766fae0\") " pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.471913 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6q2t\" (UniqueName: \"kubernetes.io/projected/1fd608a1-42a8-47e8-97f8-fc387766fae0-kube-api-access-f6q2t\") pod \"community-operators-5rw5w\" (UID: \"1fd608a1-42a8-47e8-97f8-fc387766fae0\") " pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.530795 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5331821d-f991-4245-9a76-c889657a38b8-utilities\") pod \"certified-operators-n4jbg\" (UID: \"5331821d-f991-4245-9a76-c889657a38b8\") " pod="openshift-marketplace/certified-operators-n4jbg" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.531068 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hct9s\" (UniqueName: \"kubernetes.io/projected/5331821d-f991-4245-9a76-c889657a38b8-kube-api-access-hct9s\") pod \"certified-operators-n4jbg\" (UID: 
\"5331821d-f991-4245-9a76-c889657a38b8\") " pod="openshift-marketplace/certified-operators-n4jbg" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.531232 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5331821d-f991-4245-9a76-c889657a38b8-catalog-content\") pod \"certified-operators-n4jbg\" (UID: \"5331821d-f991-4245-9a76-c889657a38b8\") " pod="openshift-marketplace/certified-operators-n4jbg" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.567071 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.634065 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5331821d-f991-4245-9a76-c889657a38b8-catalog-content\") pod \"certified-operators-n4jbg\" (UID: \"5331821d-f991-4245-9a76-c889657a38b8\") " pod="openshift-marketplace/certified-operators-n4jbg" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.634158 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5331821d-f991-4245-9a76-c889657a38b8-utilities\") pod \"certified-operators-n4jbg\" (UID: \"5331821d-f991-4245-9a76-c889657a38b8\") " pod="openshift-marketplace/certified-operators-n4jbg" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.634187 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hct9s\" (UniqueName: \"kubernetes.io/projected/5331821d-f991-4245-9a76-c889657a38b8-kube-api-access-hct9s\") pod \"certified-operators-n4jbg\" (UID: \"5331821d-f991-4245-9a76-c889657a38b8\") " pod="openshift-marketplace/certified-operators-n4jbg" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.635441 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5331821d-f991-4245-9a76-c889657a38b8-catalog-content\") pod \"certified-operators-n4jbg\" (UID: \"5331821d-f991-4245-9a76-c889657a38b8\") " pod="openshift-marketplace/certified-operators-n4jbg" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.635660 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5331821d-f991-4245-9a76-c889657a38b8-utilities\") pod \"certified-operators-n4jbg\" (UID: \"5331821d-f991-4245-9a76-c889657a38b8\") " pod="openshift-marketplace/certified-operators-n4jbg" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.653094 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hct9s\" (UniqueName: \"kubernetes.io/projected/5331821d-f991-4245-9a76-c889657a38b8-kube-api-access-hct9s\") pod \"certified-operators-n4jbg\" (UID: \"5331821d-f991-4245-9a76-c889657a38b8\") " pod="openshift-marketplace/certified-operators-n4jbg" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.736517 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-n4jbg" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.773590 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sz5qh" event={"ID":"fa717bec-7159-42c4-98b4-65eca2bd583b","Type":"ContainerStarted","Data":"d56c3caf62e33d6a36c47a3210a2a048b334bd9a3fcb9b0c926a0e8654c84334"} Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.782065 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ccgvl" event={"ID":"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51","Type":"ContainerStarted","Data":"605ac4a22db391d3473ea1adde94d24e3444f2a985af8ec5ad012bf7473e61f0"} Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.799255 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sz5qh" podStartSLOduration=2.396814667 podStartE2EDuration="3.799230955s" podCreationTimestamp="2025-10-08 07:21:10 +0000 UTC" firstStartedPulling="2025-10-08 07:21:11.744548288 +0000 UTC m=+257.115513223" lastFinishedPulling="2025-10-08 07:21:13.146964576 +0000 UTC m=+258.517929511" observedRunningTime="2025-10-08 07:21:13.796645231 +0000 UTC m=+259.167610166" watchObservedRunningTime="2025-10-08 07:21:13.799230955 +0000 UTC m=+259.170195890" Oct 08 07:21:13 crc kubenswrapper[4693]: I1008 07:21:13.986315 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-n4jbg"] Oct 08 07:21:13 crc kubenswrapper[4693]: W1008 07:21:13.998521 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5331821d_f991_4245_9a76_c889657a38b8.slice/crio-afdbe6aef6ab8e79a765e66e10bcddd5d72f0014ec2869ef69661005158a1a3e WatchSource:0}: Error finding container afdbe6aef6ab8e79a765e66e10bcddd5d72f0014ec2869ef69661005158a1a3e: Status 404 returned error can't find the container with id afdbe6aef6ab8e79a765e66e10bcddd5d72f0014ec2869ef69661005158a1a3e Oct 08 07:21:14 crc kubenswrapper[4693]: I1008 07:21:14.002232 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5rw5w"] Oct 08 07:21:14 crc kubenswrapper[4693]: W1008 07:21:14.015757 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1fd608a1_42a8_47e8_97f8_fc387766fae0.slice/crio-27300d5858e3e60989a0c39dbd85544e32eb7f524abac887ad65541283d0589d WatchSource:0}: Error finding container 27300d5858e3e60989a0c39dbd85544e32eb7f524abac887ad65541283d0589d: Status 404 returned error can't find the container with id 27300d5858e3e60989a0c39dbd85544e32eb7f524abac887ad65541283d0589d Oct 08 07:21:14 crc kubenswrapper[4693]: I1008 07:21:14.790101 4693 generic.go:334] "Generic (PLEG): container finished" podID="5331821d-f991-4245-9a76-c889657a38b8" containerID="b2d970041316022ec5609992c5f8ac332324d20c2dadc627a60c047b20b7a39f" exitCode=0 Oct 08 07:21:14 crc kubenswrapper[4693]: I1008 07:21:14.790213 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n4jbg" event={"ID":"5331821d-f991-4245-9a76-c889657a38b8","Type":"ContainerDied","Data":"b2d970041316022ec5609992c5f8ac332324d20c2dadc627a60c047b20b7a39f"} Oct 08 07:21:14 crc kubenswrapper[4693]: I1008 07:21:14.790533 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n4jbg" 
event={"ID":"5331821d-f991-4245-9a76-c889657a38b8","Type":"ContainerStarted","Data":"afdbe6aef6ab8e79a765e66e10bcddd5d72f0014ec2869ef69661005158a1a3e"} Oct 08 07:21:14 crc kubenswrapper[4693]: I1008 07:21:14.794004 4693 generic.go:334] "Generic (PLEG): container finished" podID="1fd608a1-42a8-47e8-97f8-fc387766fae0" containerID="71b557e2333b8eab5cdd19bd4faf7bf89a9615ef0f68a50784d37723ae794227" exitCode=0 Oct 08 07:21:14 crc kubenswrapper[4693]: I1008 07:21:14.794109 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5rw5w" event={"ID":"1fd608a1-42a8-47e8-97f8-fc387766fae0","Type":"ContainerDied","Data":"71b557e2333b8eab5cdd19bd4faf7bf89a9615ef0f68a50784d37723ae794227"} Oct 08 07:21:14 crc kubenswrapper[4693]: I1008 07:21:14.794167 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5rw5w" event={"ID":"1fd608a1-42a8-47e8-97f8-fc387766fae0","Type":"ContainerStarted","Data":"27300d5858e3e60989a0c39dbd85544e32eb7f524abac887ad65541283d0589d"} Oct 08 07:21:14 crc kubenswrapper[4693]: I1008 07:21:14.808312 4693 generic.go:334] "Generic (PLEG): container finished" podID="a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51" containerID="605ac4a22db391d3473ea1adde94d24e3444f2a985af8ec5ad012bf7473e61f0" exitCode=0 Oct 08 07:21:14 crc kubenswrapper[4693]: I1008 07:21:14.810694 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ccgvl" event={"ID":"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51","Type":"ContainerDied","Data":"605ac4a22db391d3473ea1adde94d24e3444f2a985af8ec5ad012bf7473e61f0"} Oct 08 07:21:15 crc kubenswrapper[4693]: I1008 07:21:15.825952 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n4jbg" event={"ID":"5331821d-f991-4245-9a76-c889657a38b8","Type":"ContainerStarted","Data":"16b190d0a2314559d2f5d6954fc28aa81bb1729633d52b1651909e7622c6b4fb"} Oct 08 07:21:15 crc kubenswrapper[4693]: I1008 07:21:15.830479 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ccgvl" event={"ID":"a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51","Type":"ContainerStarted","Data":"5d467548840fb66de30c0e7c34258c0de0673c1a0e4ea82cef1c5f13c5f0df53"} Oct 08 07:21:15 crc kubenswrapper[4693]: I1008 07:21:15.876697 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ccgvl" podStartSLOduration=3.4155511069999998 podStartE2EDuration="5.876670881s" podCreationTimestamp="2025-10-08 07:21:10 +0000 UTC" firstStartedPulling="2025-10-08 07:21:12.758159631 +0000 UTC m=+258.129124566" lastFinishedPulling="2025-10-08 07:21:15.219279405 +0000 UTC m=+260.590244340" observedRunningTime="2025-10-08 07:21:15.875113046 +0000 UTC m=+261.246078001" watchObservedRunningTime="2025-10-08 07:21:15.876670881 +0000 UTC m=+261.247635816" Oct 08 07:21:16 crc kubenswrapper[4693]: I1008 07:21:16.838549 4693 generic.go:334] "Generic (PLEG): container finished" podID="1fd608a1-42a8-47e8-97f8-fc387766fae0" containerID="810fb65cfcec786bebcb86c8f20c86dd38990f62002150f52db5e3d2306d442d" exitCode=0 Oct 08 07:21:16 crc kubenswrapper[4693]: I1008 07:21:16.838631 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5rw5w" event={"ID":"1fd608a1-42a8-47e8-97f8-fc387766fae0","Type":"ContainerDied","Data":"810fb65cfcec786bebcb86c8f20c86dd38990f62002150f52db5e3d2306d442d"} Oct 08 07:21:16 crc kubenswrapper[4693]: I1008 07:21:16.841348 4693 
generic.go:334] "Generic (PLEG): container finished" podID="5331821d-f991-4245-9a76-c889657a38b8" containerID="16b190d0a2314559d2f5d6954fc28aa81bb1729633d52b1651909e7622c6b4fb" exitCode=0
Oct 08 07:21:16 crc kubenswrapper[4693]: I1008 07:21:16.841497 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n4jbg" event={"ID":"5331821d-f991-4245-9a76-c889657a38b8","Type":"ContainerDied","Data":"16b190d0a2314559d2f5d6954fc28aa81bb1729633d52b1651909e7622c6b4fb"}
Oct 08 07:21:18 crc kubenswrapper[4693]: I1008 07:21:18.864822 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5rw5w" event={"ID":"1fd608a1-42a8-47e8-97f8-fc387766fae0","Type":"ContainerStarted","Data":"a4eb5f55dad5ab14bc0bd5528fbc99b80a63f477a1c1f4fdafcd37484c7fa5f3"}
Oct 08 07:21:18 crc kubenswrapper[4693]: I1008 07:21:18.867944 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n4jbg" event={"ID":"5331821d-f991-4245-9a76-c889657a38b8","Type":"ContainerStarted","Data":"1b998893d79b92629d7b8ad16d4df58302e4cfa9f67fbe83305bc0c7cc9b6d42"}
Oct 08 07:21:18 crc kubenswrapper[4693]: I1008 07:21:18.888304 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5rw5w" podStartSLOduration=3.193439145 podStartE2EDuration="5.888274027s" podCreationTimestamp="2025-10-08 07:21:13 +0000 UTC" firstStartedPulling="2025-10-08 07:21:14.80274719 +0000 UTC m=+260.173712125" lastFinishedPulling="2025-10-08 07:21:17.497582072 +0000 UTC m=+262.868547007" observedRunningTime="2025-10-08 07:21:18.884959897 +0000 UTC m=+264.255924852" watchObservedRunningTime="2025-10-08 07:21:18.888274027 +0000 UTC m=+264.259238962"
Oct 08 07:21:18 crc kubenswrapper[4693]: I1008 07:21:18.904331 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-n4jbg" podStartSLOduration=3.440426652 podStartE2EDuration="5.904310814s" podCreationTimestamp="2025-10-08 07:21:13 +0000 UTC" firstStartedPulling="2025-10-08 07:21:14.793840386 +0000 UTC m=+260.164805321" lastFinishedPulling="2025-10-08 07:21:17.257724548 +0000 UTC m=+262.628689483" observedRunningTime="2025-10-08 07:21:18.902498094 +0000 UTC m=+264.273463029" watchObservedRunningTime="2025-10-08 07:21:18.904310814 +0000 UTC m=+264.275275759"
Oct 08 07:21:21 crc kubenswrapper[4693]: I1008 07:21:21.162197 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sz5qh"
Oct 08 07:21:21 crc kubenswrapper[4693]: I1008 07:21:21.165772 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sz5qh"
Oct 08 07:21:21 crc kubenswrapper[4693]: I1008 07:21:21.217139 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sz5qh"
Oct 08 07:21:21 crc kubenswrapper[4693]: I1008 07:21:21.339086 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ccgvl"
Oct 08 07:21:21 crc kubenswrapper[4693]: I1008 07:21:21.339630 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ccgvl"
Oct 08 07:21:21 crc kubenswrapper[4693]: I1008 07:21:21.379608 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ccgvl"
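The two "Observed pod startup duration" entries above expose the kubelet's startup accounting: podStartE2EDuration appears to be the wall-clock time from podCreationTimestamp to watchObservedRunningTime, and podStartSLOduration is that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling), so pull time does not count against the SLO. A minimal sketch that reproduces the community-operators-5rw5w numbers from the timestamps in the entry; the arithmetic is inferred from the logged fields, not taken from kubelet source:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied verbatim from the community-operators-5rw5w entry above.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-10-08 07:21:13 +0000 UTC")
	firstPull := parse("2025-10-08 07:21:14.80274719 +0000 UTC")
	lastPull := parse("2025-10-08 07:21:17.497582072 +0000 UTC")
	observed := parse("2025-10-08 07:21:18.888274027 +0000 UTC") // watchObservedRunningTime

	e2e := observed.Sub(created)         // 5.888274027s, matching podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 3.193439145s, matching podStartSLOduration
	fmt.Println(e2e, slo)
}

The certified-operators-n4jbg entry checks out the same way: 5.904310814s minus its 2.463884162s pull window gives the logged 3.440426652.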
Oct 08 07:21:21 crc kubenswrapper[4693]: I1008 07:21:21.933157 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sz5qh"
Oct 08 07:21:21 crc kubenswrapper[4693]: I1008 07:21:21.940855 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ccgvl"
Oct 08 07:21:23 crc kubenswrapper[4693]: I1008 07:21:23.567720 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5rw5w"
Oct 08 07:21:23 crc kubenswrapper[4693]: I1008 07:21:23.568263 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5rw5w"
Oct 08 07:21:23 crc kubenswrapper[4693]: I1008 07:21:23.618765 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5rw5w"
Oct 08 07:21:23 crc kubenswrapper[4693]: I1008 07:21:23.738087 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-n4jbg"
Oct 08 07:21:23 crc kubenswrapper[4693]: I1008 07:21:23.738161 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-n4jbg"
Oct 08 07:21:23 crc kubenswrapper[4693]: I1008 07:21:23.786098 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-n4jbg"
Oct 08 07:21:23 crc kubenswrapper[4693]: I1008 07:21:23.955129 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-n4jbg"
Oct 08 07:21:23 crc kubenswrapper[4693]: I1008 07:21:23.994469 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5rw5w"
Oct 08 07:22:23 crc kubenswrapper[4693]: I1008 07:22:23.489708 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:22:23 crc kubenswrapper[4693]: I1008 07:22:23.490579 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:22:53 crc kubenswrapper[4693]: I1008 07:22:53.491112 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:22:53 crc kubenswrapper[4693]: I1008 07:22:53.491807 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:23:23 crc kubenswrapper[4693]: I1008 07:23:23.490224 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:23:23 crc kubenswrapper[4693]: I1008 07:23:23.491179 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:23:23 crc kubenswrapper[4693]: I1008 07:23:23.491254 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr"
Oct 08 07:23:23 crc kubenswrapper[4693]: I1008 07:23:23.492261 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ddfcf64630e8b7f10b94986e58e5e52c512871e23889b5f7b1fc3041780912b6"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 08 07:23:23 crc kubenswrapper[4693]: I1008 07:23:23.492377 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://ddfcf64630e8b7f10b94986e58e5e52c512871e23889b5f7b1fc3041780912b6" gracePeriod=600
Oct 08 07:23:23 crc kubenswrapper[4693]: I1008 07:23:23.783132 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="ddfcf64630e8b7f10b94986e58e5e52c512871e23889b5f7b1fc3041780912b6" exitCode=0
Oct 08 07:23:23 crc kubenswrapper[4693]: I1008 07:23:23.783195 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"ddfcf64630e8b7f10b94986e58e5e52c512871e23889b5f7b1fc3041780912b6"}
Oct 08 07:23:23 crc kubenswrapper[4693]: I1008 07:23:23.783651 4693 scope.go:117] "RemoveContainer" containerID="f72f2ae920a06d356ffdcf06193446353ba7f19c0733d137c5983b374ccb7e75"
Oct 08 07:23:24 crc kubenswrapper[4693]: I1008 07:23:24.795200 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"1f192715e7fc72c7f7c38f3d78b7db0c4e4be64fdd4d7049d590c1c31d9d85d1"}
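This block is one complete liveness cycle for machine-config-daemon-xwrvr: failures at 07:22:23, 07:22:53, and 07:23:23 ("connection refused" on http://127.0.0.1:8798/health), then "will be restarted", a graceful kill with gracePeriod=600, the ContainerDied PLEG event, and a replacement container running about one second later. Three consecutive failures before restart is consistent with a probe failureThreshold of 3. For reference, a minimal sketch of the kind of endpoint being probed; the address and path are taken from the log, while the server itself is illustrative and not machine-config-daemon code:

package main

import (
	"log"
	"net/http"
)

func main() {
	mux := http.NewServeMux()
	// Any 2xx response satisfies an HTTP liveness probe. While this
	// process is down, the kubelet's GET fails with "dial tcp
	// 127.0.0.1:8798: connect: connection refused", exactly as logged.
	mux.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	})
	log.Fatal(http.ListenAndServe("127.0.0.1:8798", mux))
}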
Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.430447 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qw4qm"]
Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.432346 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm"
Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.492577 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qw4qm"]
Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.527352 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6904a201-be16-437e-8a06-b1cf12050fd8-registry-certificates\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm"
Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.527419 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6904a201-be16-437e-8a06-b1cf12050fd8-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm"
Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.527455 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7wgs\" (UniqueName: \"kubernetes.io/projected/6904a201-be16-437e-8a06-b1cf12050fd8-kube-api-access-t7wgs\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm"
Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.527707 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6904a201-be16-437e-8a06-b1cf12050fd8-trusted-ca\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm"
Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.527904 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6904a201-be16-437e-8a06-b1cf12050fd8-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm"
Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.527983 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6904a201-be16-437e-8a06-b1cf12050fd8-bound-sa-token\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm"
Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.528029 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6904a201-be16-437e-8a06-b1cf12050fd8-registry-tls\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm"
Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.528091 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.553568 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.629758 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6904a201-be16-437e-8a06-b1cf12050fd8-registry-certificates\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.629873 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6904a201-be16-437e-8a06-b1cf12050fd8-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.629911 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7wgs\" (UniqueName: \"kubernetes.io/projected/6904a201-be16-437e-8a06-b1cf12050fd8-kube-api-access-t7wgs\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.629995 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6904a201-be16-437e-8a06-b1cf12050fd8-trusted-ca\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.630037 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6904a201-be16-437e-8a06-b1cf12050fd8-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.630084 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6904a201-be16-437e-8a06-b1cf12050fd8-bound-sa-token\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.630115 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6904a201-be16-437e-8a06-b1cf12050fd8-registry-tls\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.630600 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6904a201-be16-437e-8a06-b1cf12050fd8-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.631421 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6904a201-be16-437e-8a06-b1cf12050fd8-registry-certificates\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.632101 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6904a201-be16-437e-8a06-b1cf12050fd8-trusted-ca\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.637273 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6904a201-be16-437e-8a06-b1cf12050fd8-registry-tls\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.643683 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6904a201-be16-437e-8a06-b1cf12050fd8-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.646705 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6904a201-be16-437e-8a06-b1cf12050fd8-bound-sa-token\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.648059 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7wgs\" (UniqueName: \"kubernetes.io/projected/6904a201-be16-437e-8a06-b1cf12050fd8-kube-api-access-t7wgs\") pod \"image-registry-66df7c8f76-qw4qm\" (UID: \"6904a201-be16-437e-8a06-b1cf12050fd8\") " pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" Oct 08 07:24:35 crc kubenswrapper[4693]: I1008 07:24:35.755778 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm"
Oct 08 07:24:36 crc kubenswrapper[4693]: I1008 07:24:36.045674 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qw4qm"]
Oct 08 07:24:36 crc kubenswrapper[4693]: I1008 07:24:36.312569 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" event={"ID":"6904a201-be16-437e-8a06-b1cf12050fd8","Type":"ContainerStarted","Data":"cb3d461b106eb023ac00575beddd3436204af77c6873fa9b5c48e4ac6ae68cdc"}
Oct 08 07:24:36 crc kubenswrapper[4693]: I1008 07:24:36.313095 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm"
Oct 08 07:24:36 crc kubenswrapper[4693]: I1008 07:24:36.313116 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" event={"ID":"6904a201-be16-437e-8a06-b1cf12050fd8","Type":"ContainerStarted","Data":"3e196f97f07d53432db66e2182a0ad7923c9ab3140fe19f66f5de324f47c0eba"}
Oct 08 07:24:36 crc kubenswrapper[4693]: I1008 07:24:36.343352 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm" podStartSLOduration=1.343326357 podStartE2EDuration="1.343326357s" podCreationTimestamp="2025-10-08 07:24:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:24:36.339576476 +0000 UTC m=+461.710541451" watchObservedRunningTime="2025-10-08 07:24:36.343326357 +0000 UTC m=+461.714291322"
Oct 08 07:24:55 crc kubenswrapper[4693]: I1008 07:24:55.767713 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-qw4qm"
Oct 08 07:24:55 crc kubenswrapper[4693]: I1008 07:24:55.848279 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m4zw6"]
Oct 08 07:25:20 crc kubenswrapper[4693]: I1008 07:25:20.906698 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" podUID="e947463f-afe0-40a7-8f9f-b5d76d2086d0" containerName="registry" containerID="cri-o://2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757" gracePeriod=30
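The "SyncLoop DELETE" for the superseded registry pod, followed by "Killing container with a grace period ... gracePeriod=30", is the normal graceful-termination path: the API-side deletion carries a grace period (30 seconds here, the Kubernetes default terminationGracePeriodSeconds), and the kubelet forwards it to the runtime when stopping the container. A sketch of the API-side counterpart using client-go; the pod and namespace names come from the log, and the in-cluster wiring is standard boilerplate, not anything this log shows:

package main

import (
	"context"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	// Matches the gracePeriod=30 the kubelet logged when killing the
	// registry container of the replaced pod.
	grace := int64(30)
	err = cs.CoreV1().Pods("openshift-image-registry").Delete(
		context.TODO(),
		"image-registry-697d97f7c8-m4zw6",
		metav1.DeleteOptions{GracePeriodSeconds: &grace},
	)
	if err != nil {
		log.Fatal(err)
	}
}

In this trace the delete was issued by the controller replacing the deployment's pod rather than by hand, but the grace-period plumbing is the same either way.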
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.352239 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.494625 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e947463f-afe0-40a7-8f9f-b5d76d2086d0-trusted-ca\") pod \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") "
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.494679 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-registry-tls\") pod \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") "
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.494709 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e947463f-afe0-40a7-8f9f-b5d76d2086d0-registry-certificates\") pod \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") "
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.494976 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") "
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.495019 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e947463f-afe0-40a7-8f9f-b5d76d2086d0-installation-pull-secrets\") pod \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") "
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.495117 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e947463f-afe0-40a7-8f9f-b5d76d2086d0-ca-trust-extracted\") pod \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") "
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.495149 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-bound-sa-token\") pod \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") "
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.495174 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4mbl\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-kube-api-access-v4mbl\") pod \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\" (UID: \"e947463f-afe0-40a7-8f9f-b5d76d2086d0\") "
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.497066 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e947463f-afe0-40a7-8f9f-b5d76d2086d0-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "e947463f-afe0-40a7-8f9f-b5d76d2086d0" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.497167 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e947463f-afe0-40a7-8f9f-b5d76d2086d0-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "e947463f-afe0-40a7-8f9f-b5d76d2086d0" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.505780 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-kube-api-access-v4mbl" (OuterVolumeSpecName: "kube-api-access-v4mbl") pod "e947463f-afe0-40a7-8f9f-b5d76d2086d0" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0"). InnerVolumeSpecName "kube-api-access-v4mbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.505922 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e947463f-afe0-40a7-8f9f-b5d76d2086d0-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "e947463f-afe0-40a7-8f9f-b5d76d2086d0" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.506504 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "e947463f-afe0-40a7-8f9f-b5d76d2086d0" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.507686 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "e947463f-afe0-40a7-8f9f-b5d76d2086d0" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.512771 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "e947463f-afe0-40a7-8f9f-b5d76d2086d0" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.532632 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e947463f-afe0-40a7-8f9f-b5d76d2086d0-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "e947463f-afe0-40a7-8f9f-b5d76d2086d0" (UID: "e947463f-afe0-40a7-8f9f-b5d76d2086d0"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.596581 4693 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e947463f-afe0-40a7-8f9f-b5d76d2086d0-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.596663 4693 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.596685 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4mbl\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-kube-api-access-v4mbl\") on node \"crc\" DevicePath \"\"" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.596706 4693 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e947463f-afe0-40a7-8f9f-b5d76d2086d0-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.596750 4693 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e947463f-afe0-40a7-8f9f-b5d76d2086d0-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.596769 4693 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e947463f-afe0-40a7-8f9f-b5d76d2086d0-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.596785 4693 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e947463f-afe0-40a7-8f9f-b5d76d2086d0-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.603446 4693 generic.go:334] "Generic (PLEG): container finished" podID="e947463f-afe0-40a7-8f9f-b5d76d2086d0" containerID="2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757" exitCode=0 Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.603496 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" event={"ID":"e947463f-afe0-40a7-8f9f-b5d76d2086d0","Type":"ContainerDied","Data":"2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757"} Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.603513 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6"
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.603543 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m4zw6" event={"ID":"e947463f-afe0-40a7-8f9f-b5d76d2086d0","Type":"ContainerDied","Data":"04f4dbbe536199ba80e2a6febf093475698cfc850ae2b7080bb68e91e84f61f2"}
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.603597 4693 scope.go:117] "RemoveContainer" containerID="2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757"
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.637740 4693 scope.go:117] "RemoveContainer" containerID="2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757"
Oct 08 07:25:21 crc kubenswrapper[4693]: E1008 07:25:21.640379 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757\": container with ID starting with 2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757 not found: ID does not exist" containerID="2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757"
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.640461 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757"} err="failed to get container status \"2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757\": rpc error: code = NotFound desc = could not find container \"2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757\": container with ID starting with 2ade496d2bfc78a5b1cf224bca00a50add4e2e5dd6359b20616aac48ec595757 not found: ID does not exist"
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.647966 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m4zw6"]
Oct 08 07:25:21 crc kubenswrapper[4693]: I1008 07:25:21.658032 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m4zw6"]
Oct 08 07:25:23 crc kubenswrapper[4693]: I1008 07:25:23.374138 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e947463f-afe0-40a7-8f9f-b5d76d2086d0" path="/var/lib/kubelet/pods/e947463f-afe0-40a7-8f9f-b5d76d2086d0/volumes"
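Note the benign error pair above: after "RemoveContainer", a follow-up status query gets "rpc error: code = NotFound" and "DeleteContainer returned error" because container 2ade496d… was already gone, yet the kubelet proceeds normally to the REMOVE sync and orphaned-volume cleanup. The underlying pattern is idempotent deletion against a gRPC runtime, where absence is the desired end state. A sketch of that pattern; the runtime-client interface and fake are hypothetical stand-ins, and only the google.golang.org/grpc status helpers are real APIs:

package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// runtimeClient stands in for a CRI-style runtime connection; it is a
// placeholder, not the kubelet's actual interface.
type runtimeClient interface {
	RemoveContainer(ctx context.Context, id string) error
}

// removeIfPresent treats NotFound as success: the container already
// being absent means removal is complete, as in the log entries above.
func removeIfPresent(ctx context.Context, rt runtimeClient, id string) error {
	if err := rt.RemoveContainer(ctx, id); err != nil && status.Code(err) != codes.NotFound {
		return err
	}
	return nil
}

// fakeRuntime reproduces the runtime's NotFound answer from the log.
type fakeRuntime struct{}

func (fakeRuntime) RemoveContainer(ctx context.Context, id string) error {
	return status.Error(codes.NotFound, "could not find container "+id)
}

func main() {
	// The NotFound from the runtime is swallowed: prints <nil>.
	fmt.Println(removeIfPresent(context.TODO(), fakeRuntime{}, "2ade496d"))
}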
Oct 08 07:25:23 crc kubenswrapper[4693]: I1008 07:25:23.489771 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:25:23 crc kubenswrapper[4693]: I1008 07:25:23.489935 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:25:53 crc kubenswrapper[4693]: I1008 07:25:53.489442 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:25:53 crc kubenswrapper[4693]: I1008 07:25:53.490165 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:26:23 crc kubenswrapper[4693]: I1008 07:26:23.490072 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:26:23 crc kubenswrapper[4693]: I1008 07:26:23.490799 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:26:23 crc kubenswrapper[4693]: I1008 07:26:23.490917 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr"
Oct 08 07:26:23 crc kubenswrapper[4693]: I1008 07:26:23.491909 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1f192715e7fc72c7f7c38f3d78b7db0c4e4be64fdd4d7049d590c1c31d9d85d1"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 08 07:26:23 crc kubenswrapper[4693]: I1008 07:26:23.492033 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://1f192715e7fc72c7f7c38f3d78b7db0c4e4be64fdd4d7049d590c1c31d9d85d1" gracePeriod=600
Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.024135 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="1f192715e7fc72c7f7c38f3d78b7db0c4e4be64fdd4d7049d590c1c31d9d85d1" exitCode=0
Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.024196 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"1f192715e7fc72c7f7c38f3d78b7db0c4e4be64fdd4d7049d590c1c31d9d85d1"}
Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.024476 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"9384b82d3cba6b720e3fd8f3218eba5a267966bc2aa3a68986f9c6356d620303"}
Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.024510 4693 scope.go:117] "RemoveContainer" containerID="ddfcf64630e8b7f10b94986e58e5e52c512871e23889b5f7b1fc3041780912b6"
Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.337177 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-55lln"]
Oct 08 07:26:24 crc kubenswrapper[4693]: E1008 07:26:24.337645 4693 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="e947463f-afe0-40a7-8f9f-b5d76d2086d0" containerName="registry" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.337662 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e947463f-afe0-40a7-8f9f-b5d76d2086d0" containerName="registry" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.337756 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="e947463f-afe0-40a7-8f9f-b5d76d2086d0" containerName="registry" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.338126 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-55lln" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.339960 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.344848 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.344894 4693 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-czr6w" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.347730 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-69p7f"] Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.348381 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-69p7f" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.351683 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-55lln"] Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.351791 4693 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-8lwjf" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.357855 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-69p7f"] Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.360643 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-c4c8h"] Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.361273 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-c4c8h" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.362521 4693 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-5mdll" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.375759 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-c4c8h"] Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.383549 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzx6r\" (UniqueName: \"kubernetes.io/projected/8d52a50f-bc7f-4317-a82a-678905b53fcc-kube-api-access-jzx6r\") pod \"cert-manager-webhook-5655c58dd6-c4c8h\" (UID: \"8d52a50f-bc7f-4317-a82a-678905b53fcc\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-c4c8h" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.383597 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft8rj\" (UniqueName: \"kubernetes.io/projected/3e9c884b-9e83-4f39-b92c-c278a1a08a2a-kube-api-access-ft8rj\") pod \"cert-manager-cainjector-7f985d654d-55lln\" (UID: \"3e9c884b-9e83-4f39-b92c-c278a1a08a2a\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-55lln" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.383651 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjwth\" (UniqueName: \"kubernetes.io/projected/c4e1c031-3b17-4c19-80f7-f37b55c3cb4a-kube-api-access-tjwth\") pod \"cert-manager-5b446d88c5-69p7f\" (UID: \"c4e1c031-3b17-4c19-80f7-f37b55c3cb4a\") " pod="cert-manager/cert-manager-5b446d88c5-69p7f" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.485086 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjwth\" (UniqueName: \"kubernetes.io/projected/c4e1c031-3b17-4c19-80f7-f37b55c3cb4a-kube-api-access-tjwth\") pod \"cert-manager-5b446d88c5-69p7f\" (UID: \"c4e1c031-3b17-4c19-80f7-f37b55c3cb4a\") " pod="cert-manager/cert-manager-5b446d88c5-69p7f" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.485183 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzx6r\" (UniqueName: \"kubernetes.io/projected/8d52a50f-bc7f-4317-a82a-678905b53fcc-kube-api-access-jzx6r\") pod \"cert-manager-webhook-5655c58dd6-c4c8h\" (UID: \"8d52a50f-bc7f-4317-a82a-678905b53fcc\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-c4c8h" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.485223 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft8rj\" (UniqueName: \"kubernetes.io/projected/3e9c884b-9e83-4f39-b92c-c278a1a08a2a-kube-api-access-ft8rj\") pod \"cert-manager-cainjector-7f985d654d-55lln\" (UID: \"3e9c884b-9e83-4f39-b92c-c278a1a08a2a\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-55lln" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.507633 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzx6r\" (UniqueName: \"kubernetes.io/projected/8d52a50f-bc7f-4317-a82a-678905b53fcc-kube-api-access-jzx6r\") pod \"cert-manager-webhook-5655c58dd6-c4c8h\" (UID: \"8d52a50f-bc7f-4317-a82a-678905b53fcc\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-c4c8h" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.510860 4693 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-tjwth\" (UniqueName: \"kubernetes.io/projected/c4e1c031-3b17-4c19-80f7-f37b55c3cb4a-kube-api-access-tjwth\") pod \"cert-manager-5b446d88c5-69p7f\" (UID: \"c4e1c031-3b17-4c19-80f7-f37b55c3cb4a\") " pod="cert-manager/cert-manager-5b446d88c5-69p7f" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.517453 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft8rj\" (UniqueName: \"kubernetes.io/projected/3e9c884b-9e83-4f39-b92c-c278a1a08a2a-kube-api-access-ft8rj\") pod \"cert-manager-cainjector-7f985d654d-55lln\" (UID: \"3e9c884b-9e83-4f39-b92c-c278a1a08a2a\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-55lln" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.651315 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-55lln" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.660664 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-69p7f" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.673750 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-c4c8h" Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.936851 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-55lln"] Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.947602 4693 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 08 07:26:24 crc kubenswrapper[4693]: I1008 07:26:24.988117 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-c4c8h"] Oct 08 07:26:25 crc kubenswrapper[4693]: I1008 07:26:25.031522 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-c4c8h" event={"ID":"8d52a50f-bc7f-4317-a82a-678905b53fcc","Type":"ContainerStarted","Data":"c208abca48e9d66343961cd0d48fbae97d8d25273bd6b50fd5ec69b8537512b2"} Oct 08 07:26:25 crc kubenswrapper[4693]: I1008 07:26:25.032541 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-55lln" event={"ID":"3e9c884b-9e83-4f39-b92c-c278a1a08a2a","Type":"ContainerStarted","Data":"4d2fc6a62247512414eb47770b5a7c0a33ec40ab3a5eface6a6e8045844a2ac1"} Oct 08 07:26:25 crc kubenswrapper[4693]: I1008 07:26:25.240141 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-69p7f"] Oct 08 07:26:25 crc kubenswrapper[4693]: W1008 07:26:25.244620 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4e1c031_3b17_4c19_80f7_f37b55c3cb4a.slice/crio-bfff6f94186be058101968cb3ebd31023407dd12025440813eebfe0592d7ca29 WatchSource:0}: Error finding container bfff6f94186be058101968cb3ebd31023407dd12025440813eebfe0592d7ca29: Status 404 returned error can't find the container with id bfff6f94186be058101968cb3ebd31023407dd12025440813eebfe0592d7ca29 Oct 08 07:26:26 crc kubenswrapper[4693]: I1008 07:26:26.040139 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-69p7f" event={"ID":"c4e1c031-3b17-4c19-80f7-f37b55c3cb4a","Type":"ContainerStarted","Data":"bfff6f94186be058101968cb3ebd31023407dd12025440813eebfe0592d7ca29"} Oct 08 07:26:29 crc kubenswrapper[4693]: I1008 
07:26:29.058233 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-55lln" event={"ID":"3e9c884b-9e83-4f39-b92c-c278a1a08a2a","Type":"ContainerStarted","Data":"efd72b55329565ad47b6e2f6b47e61fe0d7bacb5077d64bfd961d499194e1a1f"} Oct 08 07:26:29 crc kubenswrapper[4693]: I1008 07:26:29.062751 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-c4c8h" event={"ID":"8d52a50f-bc7f-4317-a82a-678905b53fcc","Type":"ContainerStarted","Data":"f605dac7bee02b4d2897a9a697e7ea7df9d75c0e4e66f979c6d997d216a9d754"} Oct 08 07:26:29 crc kubenswrapper[4693]: I1008 07:26:29.063836 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-c4c8h" Oct 08 07:26:29 crc kubenswrapper[4693]: I1008 07:26:29.067422 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-69p7f" event={"ID":"c4e1c031-3b17-4c19-80f7-f37b55c3cb4a","Type":"ContainerStarted","Data":"ab61c70b37ff037b8dd067e4bdc7d1861e824a8e02d1bb968a93afe04260fae9"} Oct 08 07:26:29 crc kubenswrapper[4693]: I1008 07:26:29.083257 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-55lln" podStartSLOduration=1.75574476 podStartE2EDuration="5.083233223s" podCreationTimestamp="2025-10-08 07:26:24 +0000 UTC" firstStartedPulling="2025-10-08 07:26:24.947254507 +0000 UTC m=+570.318219462" lastFinishedPulling="2025-10-08 07:26:28.27474299 +0000 UTC m=+573.645707925" observedRunningTime="2025-10-08 07:26:29.082943006 +0000 UTC m=+574.453907961" watchObservedRunningTime="2025-10-08 07:26:29.083233223 +0000 UTC m=+574.454198168" Oct 08 07:26:29 crc kubenswrapper[4693]: I1008 07:26:29.104942 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-69p7f" podStartSLOduration=2.148390492 podStartE2EDuration="5.104914514s" podCreationTimestamp="2025-10-08 07:26:24 +0000 UTC" firstStartedPulling="2025-10-08 07:26:25.246693884 +0000 UTC m=+570.617658819" lastFinishedPulling="2025-10-08 07:26:28.203217886 +0000 UTC m=+573.574182841" observedRunningTime="2025-10-08 07:26:29.102224582 +0000 UTC m=+574.473189537" watchObservedRunningTime="2025-10-08 07:26:29.104914514 +0000 UTC m=+574.475879479" Oct 08 07:26:29 crc kubenswrapper[4693]: I1008 07:26:29.127579 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-c4c8h" podStartSLOduration=1.994488552 podStartE2EDuration="5.12755184s" podCreationTimestamp="2025-10-08 07:26:24 +0000 UTC" firstStartedPulling="2025-10-08 07:26:24.996949248 +0000 UTC m=+570.367914183" lastFinishedPulling="2025-10-08 07:26:28.130012546 +0000 UTC m=+573.500977471" observedRunningTime="2025-10-08 07:26:29.124508609 +0000 UTC m=+574.495473574" watchObservedRunningTime="2025-10-08 07:26:29.12755184 +0000 UTC m=+574.498516805" Oct 08 07:26:34 crc kubenswrapper[4693]: I1008 07:26:34.652667 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-snt7l"] Oct 08 07:26:34 crc kubenswrapper[4693]: I1008 07:26:34.653920 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovn-controller" containerID="cri-o://2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a" gracePeriod=30 Oct 08 
Oct 08 07:26:34 crc kubenswrapper[4693]: I1008 07:26:34.654118 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="northd" containerID="cri-o://38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2" gracePeriod=30
Oct 08 07:26:34 crc kubenswrapper[4693]: I1008 07:26:34.654194 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344" gracePeriod=30
Oct 08 07:26:34 crc kubenswrapper[4693]: I1008 07:26:34.654255 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="kube-rbac-proxy-node" containerID="cri-o://891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da" gracePeriod=30
Oct 08 07:26:34 crc kubenswrapper[4693]: I1008 07:26:34.654314 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovn-acl-logging" containerID="cri-o://2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc" gracePeriod=30
Oct 08 07:26:34 crc kubenswrapper[4693]: I1008 07:26:34.654546 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="sbdb" containerID="cri-o://9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9" gracePeriod=30
Oct 08 07:26:34 crc kubenswrapper[4693]: I1008 07:26:34.701577 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-c4c8h"
Oct 08 07:26:34 crc kubenswrapper[4693]: I1008 07:26:34.715344 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller" containerID="cri-o://0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd" gracePeriod=30
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.000947 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/3.log"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.003272 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovn-acl-logging/0.log"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.003714 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovn-controller/0.log"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.004177 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030317 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-run-ovn-kubernetes\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") "
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030408 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-run-netns\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") "
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030480 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-var-lib-openvswitch\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") "
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030478 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030550 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovnkube-config\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") "
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030557 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030581 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-slash\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") "
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030612 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030638 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovnkube-script-lib\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030709 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-log-socket\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030735 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-openvswitch\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030785 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-kubelet\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030788 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-slash" (OuterVolumeSpecName: "host-slash") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030835 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-systemd-units\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030850 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-log-socket" (OuterVolumeSpecName: "log-socket") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030875 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030868 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqs6f\" (UniqueName: \"kubernetes.io/projected/379c61a3-51ff-4bdf-ab8b-5af8bf090716-kube-api-access-lqs6f\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030905 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030904 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.030975 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-systemd\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031040 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-ovn\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031082 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovn-node-metrics-cert\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031118 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-env-overrides\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031158 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-cni-netd\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031182 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031206 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-etc-openvswitch\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031241 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-node-log\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031173 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031256 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031276 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031282 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-var-lib-cni-networks-ovn-kubernetes\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031323 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031355 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-cni-bin\") pod \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\" (UID: \"379c61a3-51ff-4bdf-ab8b-5af8bf090716\") " Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031367 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-node-log" (OuterVolumeSpecName: "node-log") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031511 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031697 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031911 4693 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031939 4693 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031958 4693 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031976 4693 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.031994 4693 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.032011 4693 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.032028 4693 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-node-log\") on node \"crc\" DevicePath \"\"" Oct 08 
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.032067 4693 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-cni-bin\") on node \"crc\" DevicePath \"\""
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.032087 4693 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.032106 4693 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-run-netns\") on node \"crc\" DevicePath \"\""
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.032125 4693 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-var-lib-openvswitch\") on node \"crc\" DevicePath \"\""
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.032128 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.032142 4693 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovnkube-config\") on node \"crc\" DevicePath \"\""
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.032160 4693 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-host-slash\") on node \"crc\" DevicePath \"\""
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.032177 4693 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-log-socket\") on node \"crc\" DevicePath \"\""
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.032195 4693 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-openvswitch\") on node \"crc\" DevicePath \"\""
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.035937 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/379c61a3-51ff-4bdf-ab8b-5af8bf090716-kube-api-access-lqs6f" (OuterVolumeSpecName: "kube-api-access-lqs6f") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "kube-api-access-lqs6f". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.040363 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.046586 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "379c61a3-51ff-4bdf-ab8b-5af8bf090716" (UID: "379c61a3-51ff-4bdf-ab8b-5af8bf090716"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057052 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-8h5s7"] Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057317 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovn-acl-logging" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057331 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovn-acl-logging" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057339 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="nbdb" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057344 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="nbdb" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057357 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="northd" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057364 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="northd" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057371 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovn-controller" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057394 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovn-controller" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057404 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057410 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057416 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057422 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057430 4693 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="sbdb" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057436 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="sbdb" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057444 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="kube-rbac-proxy-node" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057450 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="kube-rbac-proxy-node" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057477 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057483 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057491 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057497 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057506 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="kube-rbac-proxy-ovn-metrics" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057512 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="kube-rbac-proxy-ovn-metrics" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057520 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="kubecfg-setup" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057526 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="kubecfg-setup" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057650 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="northd" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057662 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057670 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="kube-rbac-proxy-node" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057676 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057683 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="nbdb" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057710 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="kube-rbac-proxy-ovn-metrics" Oct 08 07:26:35 crc 
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057727 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057733 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovn-acl-logging"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057740 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovn-controller"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057747 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="sbdb"
Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.057941 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.057949 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.058059 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerName="ovnkube-controller"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.060218 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.108000 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bfhs8_8ddc214e-6569-4b0e-8783-f484a001ce6a/kube-multus/2.log"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.108548 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bfhs8_8ddc214e-6569-4b0e-8783-f484a001ce6a/kube-multus/1.log"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.108602 4693 generic.go:334] "Generic (PLEG): container finished" podID="8ddc214e-6569-4b0e-8783-f484a001ce6a" containerID="4265598bf83e8b88e476a3eae9245760e378f6d7351187cc4599c3af2f31f4c8" exitCode=2
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.108698 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bfhs8" event={"ID":"8ddc214e-6569-4b0e-8783-f484a001ce6a","Type":"ContainerDied","Data":"4265598bf83e8b88e476a3eae9245760e378f6d7351187cc4599c3af2f31f4c8"}
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.108733 4693 scope.go:117] "RemoveContainer" containerID="07051e612172de072a51e3a44710a13a800e43665c0a76d52b1989d34cf85d3a"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.109357 4693 scope.go:117] "RemoveContainer" containerID="4265598bf83e8b88e476a3eae9245760e378f6d7351187cc4599c3af2f31f4c8"
Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.109747 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-bfhs8_openshift-multus(8ddc214e-6569-4b0e-8783-f484a001ce6a)\"" pod="openshift-multus/multus-bfhs8" podUID="8ddc214e-6569-4b0e-8783-f484a001ce6a"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.111942 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovnkube-controller/3.log"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.114983 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovn-acl-logging/0.log"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.115649 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-snt7l_379c61a3-51ff-4bdf-ab8b-5af8bf090716/ovn-controller/0.log"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116212 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd" exitCode=0
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116229 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9" exitCode=0
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116237 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9" exitCode=0
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116245 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2" exitCode=0
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116251 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344" exitCode=0
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116258 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da" exitCode=0
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116266 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc" exitCode=143
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116274 4693 generic.go:334] "Generic (PLEG): container finished" podID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" containerID="2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a" exitCode=143
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116303 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd"}
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116344 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9"}
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116372 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9"}
event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116403 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116423 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116442 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116461 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116479 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116491 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116502 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116513 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116524 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116536 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116546 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116556 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116567 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116582 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116597 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116609 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116624 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116371 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116636 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116758 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116777 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116786 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116795 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116802 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116829 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116868 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116889 4693 pod_container_deletor.go:114] "Failed to issue the request to 
remove container" containerID={"Type":"cri-o","ID":"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116899 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116906 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116914 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116922 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116930 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116937 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116946 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116953 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116960 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116971 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-snt7l" event={"ID":"379c61a3-51ff-4bdf-ab8b-5af8bf090716","Type":"ContainerDied","Data":"9e1f8fff952647c2eae83b9c29e2beb44f42fbc10a303450e6cf5c2410b3cc40"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116982 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.116993 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.117001 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.117009 4693 pod_container_deletor.go:114] "Failed to issue the request to 
remove container" containerID={"Type":"cri-o","ID":"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.117017 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.117025 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.117033 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.117041 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.117051 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.117059 4693 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c"} Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133040 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bd278adc-acef-47df-84e3-b4e57708e2c5-ovnkube-script-lib\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133312 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-run-ovn-kubernetes\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133338 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-var-lib-openvswitch\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133372 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gdsd\" (UniqueName: \"kubernetes.io/projected/bd278adc-acef-47df-84e3-b4e57708e2c5-kube-api-access-9gdsd\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133390 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-cni-netd\") pod 
\"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133511 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-log-socket\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133531 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-run-openvswitch\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133613 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bd278adc-acef-47df-84e3-b4e57708e2c5-ovnkube-config\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133728 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-slash\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133833 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-run-systemd\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133881 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bd278adc-acef-47df-84e3-b4e57708e2c5-ovn-node-metrics-cert\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133926 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-run-ovn\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.133957 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-systemd-units\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.134075 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.134113 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-node-log\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.134248 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bd278adc-acef-47df-84e3-b4e57708e2c5-env-overrides\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.134351 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-kubelet\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.134460 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-cni-bin\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.134550 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-etc-openvswitch\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.134628 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-run-netns\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.134868 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqs6f\" (UniqueName: \"kubernetes.io/projected/379c61a3-51ff-4bdf-ab8b-5af8bf090716-kube-api-access-lqs6f\") on node \"crc\" DevicePath \"\"" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.134886 4693 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/379c61a3-51ff-4bdf-ab8b-5af8bf090716-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.134939 4693 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.134956 4693 reconciler_common.go:293] 
"Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/379c61a3-51ff-4bdf-ab8b-5af8bf090716-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.147375 4693 scope.go:117] "RemoveContainer" containerID="0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.169303 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-snt7l"] Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.173350 4693 scope.go:117] "RemoveContainer" containerID="96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.174338 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-snt7l"] Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.192217 4693 scope.go:117] "RemoveContainer" containerID="9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.210086 4693 scope.go:117] "RemoveContainer" containerID="c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.224973 4693 scope.go:117] "RemoveContainer" containerID="38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236541 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gdsd\" (UniqueName: \"kubernetes.io/projected/bd278adc-acef-47df-84e3-b4e57708e2c5-kube-api-access-9gdsd\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236603 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-cni-netd\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236641 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-log-socket\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236671 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-run-openvswitch\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236714 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bd278adc-acef-47df-84e3-b4e57708e2c5-ovnkube-config\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236747 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-cni-netd\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236750 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-slash\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236796 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-slash\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236846 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-run-openvswitch\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236884 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-run-systemd\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236922 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-run-systemd\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236925 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bd278adc-acef-47df-84e3-b4e57708e2c5-ovn-node-metrics-cert\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.236979 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-run-ovn\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237001 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-systemd-units\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237023 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-var-lib-cni-networks-ovn-kubernetes\") pod 
\"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237048 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-node-log\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237083 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bd278adc-acef-47df-84e3-b4e57708e2c5-env-overrides\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237126 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-kubelet\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237147 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-systemd-units\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237166 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-node-log\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237180 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-cni-bin\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237133 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-run-ovn\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237152 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-cni-bin\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237210 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-kubelet\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237249 4693 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-etc-openvswitch\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237284 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-run-netns\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237312 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-etc-openvswitch\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237328 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bd278adc-acef-47df-84e3-b4e57708e2c5-ovnkube-script-lib\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237403 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-run-netns\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237442 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-run-ovn-kubernetes\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237498 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-var-lib-openvswitch\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237551 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-run-ovn-kubernetes\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237594 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bd278adc-acef-47df-84e3-b4e57708e2c5-env-overrides\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237632 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-var-lib-openvswitch\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.237974 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bd278adc-acef-47df-84e3-b4e57708e2c5-ovnkube-config\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.238050 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-log-socket\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.238206 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bd278adc-acef-47df-84e3-b4e57708e2c5-ovnkube-script-lib\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.238287 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bd278adc-acef-47df-84e3-b4e57708e2c5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.240539 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bd278adc-acef-47df-84e3-b4e57708e2c5-ovn-node-metrics-cert\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.242064 4693 scope.go:117] "RemoveContainer" containerID="c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.258308 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gdsd\" (UniqueName: \"kubernetes.io/projected/bd278adc-acef-47df-84e3-b4e57708e2c5-kube-api-access-9gdsd\") pod \"ovnkube-node-8h5s7\" (UID: \"bd278adc-acef-47df-84e3-b4e57708e2c5\") " pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.267097 4693 scope.go:117] "RemoveContainer" containerID="891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.283546 4693 scope.go:117] "RemoveContainer" containerID="2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.302333 4693 scope.go:117] "RemoveContainer" containerID="2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.323088 4693 scope.go:117] "RemoveContainer" containerID="4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.341386 4693 
scope.go:117] "RemoveContainer" containerID="0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.341770 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd\": container with ID starting with 0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd not found: ID does not exist" containerID="0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.341835 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd"} err="failed to get container status \"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd\": rpc error: code = NotFound desc = could not find container \"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd\": container with ID starting with 0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.341869 4693 scope.go:117] "RemoveContainer" containerID="96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.342455 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\": container with ID starting with 96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc not found: ID does not exist" containerID="96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.342493 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc"} err="failed to get container status \"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\": rpc error: code = NotFound desc = could not find container \"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\": container with ID starting with 96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.342518 4693 scope.go:117] "RemoveContainer" containerID="9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.343054 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\": container with ID starting with 9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9 not found: ID does not exist" containerID="9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.343142 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9"} err="failed to get container status \"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\": rpc error: code = NotFound desc = could not find container \"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\": container with ID starting with 
9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.343199 4693 scope.go:117] "RemoveContainer" containerID="c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.343680 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\": container with ID starting with c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9 not found: ID does not exist" containerID="c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.343717 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9"} err="failed to get container status \"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\": rpc error: code = NotFound desc = could not find container \"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\": container with ID starting with c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.343737 4693 scope.go:117] "RemoveContainer" containerID="38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.344262 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\": container with ID starting with 38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2 not found: ID does not exist" containerID="38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.344290 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2"} err="failed to get container status \"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\": rpc error: code = NotFound desc = could not find container \"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\": container with ID starting with 38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.344308 4693 scope.go:117] "RemoveContainer" containerID="c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.344634 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\": container with ID starting with c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344 not found: ID does not exist" containerID="c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.344660 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344"} err="failed to get container status \"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\": rpc 
error: code = NotFound desc = could not find container \"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\": container with ID starting with c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.344680 4693 scope.go:117] "RemoveContainer" containerID="891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.345187 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\": container with ID starting with 891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da not found: ID does not exist" containerID="891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.345214 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da"} err="failed to get container status \"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\": rpc error: code = NotFound desc = could not find container \"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\": container with ID starting with 891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.345233 4693 scope.go:117] "RemoveContainer" containerID="2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.345731 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\": container with ID starting with 2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc not found: ID does not exist" containerID="2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.345784 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc"} err="failed to get container status \"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\": rpc error: code = NotFound desc = could not find container \"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\": container with ID starting with 2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.345817 4693 scope.go:117] "RemoveContainer" containerID="2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.346343 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\": container with ID starting with 2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a not found: ID does not exist" containerID="2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.346375 4693 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a"} err="failed to get container status \"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\": rpc error: code = NotFound desc = could not find container \"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\": container with ID starting with 2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.346399 4693 scope.go:117] "RemoveContainer" containerID="4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c" Oct 08 07:26:35 crc kubenswrapper[4693]: E1008 07:26:35.346694 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\": container with ID starting with 4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c not found: ID does not exist" containerID="4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.346743 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c"} err="failed to get container status \"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\": rpc error: code = NotFound desc = could not find container \"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\": container with ID starting with 4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.346774 4693 scope.go:117] "RemoveContainer" containerID="0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.347233 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd"} err="failed to get container status \"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd\": rpc error: code = NotFound desc = could not find container \"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd\": container with ID starting with 0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.347261 4693 scope.go:117] "RemoveContainer" containerID="96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.347852 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc"} err="failed to get container status \"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\": rpc error: code = NotFound desc = could not find container \"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\": container with ID starting with 96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.347907 4693 scope.go:117] "RemoveContainer" containerID="9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.348252 4693 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9"} err="failed to get container status \"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\": rpc error: code = NotFound desc = could not find container \"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\": container with ID starting with 9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.348281 4693 scope.go:117] "RemoveContainer" containerID="c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.348629 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9"} err="failed to get container status \"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\": rpc error: code = NotFound desc = could not find container \"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\": container with ID starting with c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.348647 4693 scope.go:117] "RemoveContainer" containerID="38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.349050 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2"} err="failed to get container status \"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\": rpc error: code = NotFound desc = could not find container \"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\": container with ID starting with 38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.349093 4693 scope.go:117] "RemoveContainer" containerID="c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.349530 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344"} err="failed to get container status \"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\": rpc error: code = NotFound desc = could not find container \"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\": container with ID starting with c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.349558 4693 scope.go:117] "RemoveContainer" containerID="891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.350080 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da"} err="failed to get container status \"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\": rpc error: code = NotFound desc = could not find container \"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\": container with ID starting with 891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da not found: ID does not exist" Oct 
08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.350136 4693 scope.go:117] "RemoveContainer" containerID="2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.350588 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc"} err="failed to get container status \"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\": rpc error: code = NotFound desc = could not find container \"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\": container with ID starting with 2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.350614 4693 scope.go:117] "RemoveContainer" containerID="2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.351117 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a"} err="failed to get container status \"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\": rpc error: code = NotFound desc = could not find container \"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\": container with ID starting with 2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.351142 4693 scope.go:117] "RemoveContainer" containerID="4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.351618 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c"} err="failed to get container status \"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\": rpc error: code = NotFound desc = could not find container \"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\": container with ID starting with 4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.351644 4693 scope.go:117] "RemoveContainer" containerID="0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.352101 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd"} err="failed to get container status \"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd\": rpc error: code = NotFound desc = could not find container \"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd\": container with ID starting with 0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.352130 4693 scope.go:117] "RemoveContainer" containerID="96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.352426 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc"} err="failed to get container status 
\"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\": rpc error: code = NotFound desc = could not find container \"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\": container with ID starting with 96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.352473 4693 scope.go:117] "RemoveContainer" containerID="9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.352926 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9"} err="failed to get container status \"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\": rpc error: code = NotFound desc = could not find container \"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\": container with ID starting with 9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.352952 4693 scope.go:117] "RemoveContainer" containerID="c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.353274 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9"} err="failed to get container status \"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\": rpc error: code = NotFound desc = could not find container \"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\": container with ID starting with c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.353318 4693 scope.go:117] "RemoveContainer" containerID="38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.353727 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2"} err="failed to get container status \"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\": rpc error: code = NotFound desc = could not find container \"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\": container with ID starting with 38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.353751 4693 scope.go:117] "RemoveContainer" containerID="c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.354273 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344"} err="failed to get container status \"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\": rpc error: code = NotFound desc = could not find container \"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\": container with ID starting with c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.354310 4693 scope.go:117] "RemoveContainer" 
containerID="891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.354852 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da"} err="failed to get container status \"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\": rpc error: code = NotFound desc = could not find container \"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\": container with ID starting with 891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.354886 4693 scope.go:117] "RemoveContainer" containerID="2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.355238 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc"} err="failed to get container status \"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\": rpc error: code = NotFound desc = could not find container \"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\": container with ID starting with 2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.355278 4693 scope.go:117] "RemoveContainer" containerID="2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.355723 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a"} err="failed to get container status \"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\": rpc error: code = NotFound desc = could not find container \"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\": container with ID starting with 2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.355763 4693 scope.go:117] "RemoveContainer" containerID="4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.356133 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c"} err="failed to get container status \"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\": rpc error: code = NotFound desc = could not find container \"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\": container with ID starting with 4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.356172 4693 scope.go:117] "RemoveContainer" containerID="0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.356575 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd"} err="failed to get container status \"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd\": rpc error: code = NotFound desc = could not find 
container \"0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd\": container with ID starting with 0e1ae30e2f8fe8ec741f52f19007fa2e1dd6926b8946bad429d90b58881415dd not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.356601 4693 scope.go:117] "RemoveContainer" containerID="96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.357253 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc"} err="failed to get container status \"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\": rpc error: code = NotFound desc = could not find container \"96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc\": container with ID starting with 96af4ab28aff5f7a33dcb23fe168e895eae8dac211b7f428650b8dbd8134f0cc not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.357295 4693 scope.go:117] "RemoveContainer" containerID="9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.357802 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9"} err="failed to get container status \"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\": rpc error: code = NotFound desc = could not find container \"9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9\": container with ID starting with 9e87d30b1ef45f5d9911ae5159fb2c35393febe2490e8e0be17432a9f06fdfc9 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.357848 4693 scope.go:117] "RemoveContainer" containerID="c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.358291 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9"} err="failed to get container status \"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\": rpc error: code = NotFound desc = could not find container \"c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9\": container with ID starting with c8b558a47d26a04527ab0dd14e9f259d6ae90c0dc3a72c71dbc81546ce9f23b9 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.358346 4693 scope.go:117] "RemoveContainer" containerID="38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.358768 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2"} err="failed to get container status \"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\": rpc error: code = NotFound desc = could not find container \"38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2\": container with ID starting with 38ea0d8e25457ddc5c2eeae45694fe448bac6e9add53fd3198adf1720f8a68d2 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.358792 4693 scope.go:117] "RemoveContainer" containerID="c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.359243 4693 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344"} err="failed to get container status \"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\": rpc error: code = NotFound desc = could not find container \"c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344\": container with ID starting with c1ab13d59f06d1f4134ab5640c68da52fc15191ef23f9570cd8a2bddce16f344 not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.359281 4693 scope.go:117] "RemoveContainer" containerID="891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.359714 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da"} err="failed to get container status \"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\": rpc error: code = NotFound desc = could not find container \"891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da\": container with ID starting with 891c1f567176301313153d20098b0f22f7ae9fbf5dd70d4abca424dc27eeb7da not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.359750 4693 scope.go:117] "RemoveContainer" containerID="2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.360125 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc"} err="failed to get container status \"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\": rpc error: code = NotFound desc = could not find container \"2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc\": container with ID starting with 2f3d2c2001643da15176cf726f4ed83f8a59493795aa079d9b1f9bd2e298ffbc not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.360152 4693 scope.go:117] "RemoveContainer" containerID="2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.360577 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a"} err="failed to get container status \"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\": rpc error: code = NotFound desc = could not find container \"2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a\": container with ID starting with 2e9a39cbfac3bdcfa1e01e47bf7f55af5614e069287d60eef4bf453d6a8da54a not found: ID does not exist" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.360622 4693 scope.go:117] "RemoveContainer" containerID="4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c" Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.361187 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c"} err="failed to get container status \"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\": rpc error: code = NotFound desc = could not find container \"4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c\": container with ID starting with 
4b1d3c21e15900a9e7dd136ebc17f161a11c61cbef6a8311f730b17662845f2c not found: ID does not exist"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.374484 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="379c61a3-51ff-4bdf-ab8b-5af8bf090716" path="/var/lib/kubelet/pods/379c61a3-51ff-4bdf-ab8b-5af8bf090716/volumes"
Oct 08 07:26:35 crc kubenswrapper[4693]: I1008 07:26:35.380648 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7"
Oct 08 07:26:36 crc kubenswrapper[4693]: I1008 07:26:36.125111 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bfhs8_8ddc214e-6569-4b0e-8783-f484a001ce6a/kube-multus/2.log"
Oct 08 07:26:36 crc kubenswrapper[4693]: I1008 07:26:36.127756 4693 generic.go:334] "Generic (PLEG): container finished" podID="bd278adc-acef-47df-84e3-b4e57708e2c5" containerID="f4c672f2f623501a26ed3c501a40879b5ca4097278d2f9aa448ff458bf6536d9" exitCode=0
Oct 08 07:26:36 crc kubenswrapper[4693]: I1008 07:26:36.127876 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" event={"ID":"bd278adc-acef-47df-84e3-b4e57708e2c5","Type":"ContainerDied","Data":"f4c672f2f623501a26ed3c501a40879b5ca4097278d2f9aa448ff458bf6536d9"}
Oct 08 07:26:36 crc kubenswrapper[4693]: I1008 07:26:36.127914 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" event={"ID":"bd278adc-acef-47df-84e3-b4e57708e2c5","Type":"ContainerStarted","Data":"c905a76bffc17448e557605064870a300fa1c5f5c70873bab4dbf8e2290bae6a"}
Oct 08 07:26:37 crc kubenswrapper[4693]: I1008 07:26:37.138788 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" event={"ID":"bd278adc-acef-47df-84e3-b4e57708e2c5","Type":"ContainerStarted","Data":"8640133b239a3fdac2d3716aa4ca0207329314b91d26f33a32757f42e33b9b99"}
Oct 08 07:26:37 crc kubenswrapper[4693]: I1008 07:26:37.139448 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" event={"ID":"bd278adc-acef-47df-84e3-b4e57708e2c5","Type":"ContainerStarted","Data":"adcaef3e5230be1dd51523336f5a00b6362c17d77947b97d7f104dd6f1cb656b"}
Oct 08 07:26:37 crc kubenswrapper[4693]: I1008 07:26:37.139463 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" event={"ID":"bd278adc-acef-47df-84e3-b4e57708e2c5","Type":"ContainerStarted","Data":"d78473491b78ea821c23a1721de2821b1c6dcfaf93ada5835c7c7823f8b8188f"}
Oct 08 07:26:37 crc kubenswrapper[4693]: I1008 07:26:37.139477 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" event={"ID":"bd278adc-acef-47df-84e3-b4e57708e2c5","Type":"ContainerStarted","Data":"1c001612bdeec1a434b638573f73bfccd80bb920ea604b03e09364ee5d36efd5"}
Oct 08 07:26:37 crc kubenswrapper[4693]: I1008 07:26:37.139488 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" event={"ID":"bd278adc-acef-47df-84e3-b4e57708e2c5","Type":"ContainerStarted","Data":"cf2903b8493eae56f65bdf67e03f78d951767133e676b19529321b34b9057150"}
Oct 08 07:26:37 crc kubenswrapper[4693]: I1008 07:26:37.139500 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" event={"ID":"bd278adc-acef-47df-84e3-b4e57708e2c5","Type":"ContainerStarted","Data":"333055ae376c1e48767e1983c95bb13f36e1345e17020bb37fecf6e939c27f01"}
Oct 08 07:26:40 crc kubenswrapper[4693]: I1008 07:26:40.164344 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" event={"ID":"bd278adc-acef-47df-84e3-b4e57708e2c5","Type":"ContainerStarted","Data":"dd7a5efb475165887d4dfa9388f0ad011b6f73c752905c518d2e6ebd4e9f0e2e"}
Oct 08 07:26:42 crc kubenswrapper[4693]: I1008 07:26:42.189557 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" event={"ID":"bd278adc-acef-47df-84e3-b4e57708e2c5","Type":"ContainerStarted","Data":"a23afeb7c052e0b0f335a2c684a241a61fe981f23631eecf850ff8bfc442ddcb"}
Oct 08 07:26:42 crc kubenswrapper[4693]: I1008 07:26:42.190715 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7"
Oct 08 07:26:42 crc kubenswrapper[4693]: I1008 07:26:42.190754 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7"
Oct 08 07:26:42 crc kubenswrapper[4693]: I1008 07:26:42.190775 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7"
Oct 08 07:26:42 crc kubenswrapper[4693]: I1008 07:26:42.225445 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7"
Oct 08 07:26:42 crc kubenswrapper[4693]: I1008 07:26:42.231281 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7" podStartSLOduration=7.231263306 podStartE2EDuration="7.231263306s" podCreationTimestamp="2025-10-08 07:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:26:42.225144972 +0000 UTC m=+587.596109917" watchObservedRunningTime="2025-10-08 07:26:42.231263306 +0000 UTC m=+587.602228251"
Oct 08 07:26:42 crc kubenswrapper[4693]: I1008 07:26:42.246998 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7"
Oct 08 07:26:50 crc kubenswrapper[4693]: I1008 07:26:50.363586 4693 scope.go:117] "RemoveContainer" containerID="4265598bf83e8b88e476a3eae9245760e378f6d7351187cc4599c3af2f31f4c8"
Oct 08 07:26:50 crc kubenswrapper[4693]: E1008 07:26:50.365417 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-bfhs8_openshift-multus(8ddc214e-6569-4b0e-8783-f484a001ce6a)\"" pod="openshift-multus/multus-bfhs8" podUID="8ddc214e-6569-4b0e-8783-f484a001ce6a"
Oct 08 07:27:02 crc kubenswrapper[4693]: I1008 07:27:02.363455 4693 scope.go:117] "RemoveContainer" containerID="4265598bf83e8b88e476a3eae9245760e378f6d7351187cc4599c3af2f31f4c8"
Oct 08 07:27:03 crc kubenswrapper[4693]: I1008 07:27:03.337392 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bfhs8_8ddc214e-6569-4b0e-8783-f484a001ce6a/kube-multus/2.log"
Oct 08 07:27:03 crc kubenswrapper[4693]: I1008 07:27:03.338140 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bfhs8" event={"ID":"8ddc214e-6569-4b0e-8783-f484a001ce6a","Type":"ContainerStarted","Data":"e61791f10c65a9efca68bcb04787414fe051ffd3e1b229ea6fc967d0806b4fe5"}
Oct 08 07:27:05 crc kubenswrapper[4693]: I1008 07:27:05.425296 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8h5s7"
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.803726 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"]
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.805534 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.807612 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.823134 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"]
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.859722 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2378198d-e3ee-4cdc-a298-0d386fdf78ae-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2\" (UID: \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.859841 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24wn9\" (UniqueName: \"kubernetes.io/projected/2378198d-e3ee-4cdc-a298-0d386fdf78ae-kube-api-access-24wn9\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2\" (UID: \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.860033 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2378198d-e3ee-4cdc-a298-0d386fdf78ae-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2\" (UID: \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.961568 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2378198d-e3ee-4cdc-a298-0d386fdf78ae-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2\" (UID: \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.961741 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2378198d-e3ee-4cdc-a298-0d386fdf78ae-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2\" (UID: \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.961951 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24wn9\" (UniqueName: \"kubernetes.io/projected/2378198d-e3ee-4cdc-a298-0d386fdf78ae-kube-api-access-24wn9\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2\" (UID: \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.962548 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2378198d-e3ee-4cdc-a298-0d386fdf78ae-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2\" (UID: \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.963031 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2378198d-e3ee-4cdc-a298-0d386fdf78ae-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2\" (UID: \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:18 crc kubenswrapper[4693]: I1008 07:27:18.997734 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24wn9\" (UniqueName: \"kubernetes.io/projected/2378198d-e3ee-4cdc-a298-0d386fdf78ae-kube-api-access-24wn9\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2\" (UID: \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:19 crc kubenswrapper[4693]: I1008 07:27:19.135039 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:19 crc kubenswrapper[4693]: I1008 07:27:19.420526 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"]
Oct 08 07:27:19 crc kubenswrapper[4693]: I1008 07:27:19.444889 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2" event={"ID":"2378198d-e3ee-4cdc-a298-0d386fdf78ae","Type":"ContainerStarted","Data":"d74109dfc339402d78ea714785d23d6682a7e53d47535e5f56c96c8de734948a"}
Oct 08 07:27:20 crc kubenswrapper[4693]: I1008 07:27:20.453436 4693 generic.go:334] "Generic (PLEG): container finished" podID="2378198d-e3ee-4cdc-a298-0d386fdf78ae" containerID="5bd825419a3b3fb234f390b08b5104b3179a59da59f7c9216449b69273a92779" exitCode=0
Oct 08 07:27:20 crc kubenswrapper[4693]: I1008 07:27:20.453580 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2" event={"ID":"2378198d-e3ee-4cdc-a298-0d386fdf78ae","Type":"ContainerDied","Data":"5bd825419a3b3fb234f390b08b5104b3179a59da59f7c9216449b69273a92779"}
Oct 08 07:27:22 crc kubenswrapper[4693]: I1008 07:27:22.469784 4693 generic.go:334] "Generic (PLEG): container finished" podID="2378198d-e3ee-4cdc-a298-0d386fdf78ae" containerID="00c581326f130fc4959bd82f4d5737457db60c8680527642faf7ac1a45e4a2e0" exitCode=0
Oct 08 07:27:22 crc kubenswrapper[4693]: I1008 07:27:22.469884 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2" event={"ID":"2378198d-e3ee-4cdc-a298-0d386fdf78ae","Type":"ContainerDied","Data":"00c581326f130fc4959bd82f4d5737457db60c8680527642faf7ac1a45e4a2e0"}
Oct 08 07:27:23 crc kubenswrapper[4693]: I1008 07:27:23.479975 4693 generic.go:334] "Generic (PLEG): container finished" podID="2378198d-e3ee-4cdc-a298-0d386fdf78ae" containerID="de08fa6a2f00082deeaf51f31e44485108740d958b34812a1e88726d2b6cf132" exitCode=0
Oct 08 07:27:23 crc kubenswrapper[4693]: I1008 07:27:23.480056 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2" event={"ID":"2378198d-e3ee-4cdc-a298-0d386fdf78ae","Type":"ContainerDied","Data":"de08fa6a2f00082deeaf51f31e44485108740d958b34812a1e88726d2b6cf132"}
Oct 08 07:27:24 crc kubenswrapper[4693]: I1008 07:27:24.834403 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:24 crc kubenswrapper[4693]: I1008 07:27:24.940848 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2378198d-e3ee-4cdc-a298-0d386fdf78ae-bundle\") pod \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\" (UID: \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\") "
Oct 08 07:27:24 crc kubenswrapper[4693]: I1008 07:27:24.940938 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24wn9\" (UniqueName: \"kubernetes.io/projected/2378198d-e3ee-4cdc-a298-0d386fdf78ae-kube-api-access-24wn9\") pod \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\" (UID: \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\") "
Oct 08 07:27:24 crc kubenswrapper[4693]: I1008 07:27:24.940976 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2378198d-e3ee-4cdc-a298-0d386fdf78ae-util\") pod \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\" (UID: \"2378198d-e3ee-4cdc-a298-0d386fdf78ae\") "
Oct 08 07:27:24 crc kubenswrapper[4693]: I1008 07:27:24.944404 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2378198d-e3ee-4cdc-a298-0d386fdf78ae-bundle" (OuterVolumeSpecName: "bundle") pod "2378198d-e3ee-4cdc-a298-0d386fdf78ae" (UID: "2378198d-e3ee-4cdc-a298-0d386fdf78ae"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:27:24 crc kubenswrapper[4693]: I1008 07:27:24.950566 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2378198d-e3ee-4cdc-a298-0d386fdf78ae-kube-api-access-24wn9" (OuterVolumeSpecName: "kube-api-access-24wn9") pod "2378198d-e3ee-4cdc-a298-0d386fdf78ae" (UID: "2378198d-e3ee-4cdc-a298-0d386fdf78ae"). InnerVolumeSpecName "kube-api-access-24wn9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:27:24 crc kubenswrapper[4693]: I1008 07:27:24.963960 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2378198d-e3ee-4cdc-a298-0d386fdf78ae-util" (OuterVolumeSpecName: "util") pod "2378198d-e3ee-4cdc-a298-0d386fdf78ae" (UID: "2378198d-e3ee-4cdc-a298-0d386fdf78ae"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:27:25 crc kubenswrapper[4693]: I1008 07:27:25.042295 4693 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2378198d-e3ee-4cdc-a298-0d386fdf78ae-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:27:25 crc kubenswrapper[4693]: I1008 07:27:25.042349 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24wn9\" (UniqueName: \"kubernetes.io/projected/2378198d-e3ee-4cdc-a298-0d386fdf78ae-kube-api-access-24wn9\") on node \"crc\" DevicePath \"\""
Oct 08 07:27:25 crc kubenswrapper[4693]: I1008 07:27:25.042400 4693 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2378198d-e3ee-4cdc-a298-0d386fdf78ae-util\") on node \"crc\" DevicePath \"\""
Oct 08 07:27:25 crc kubenswrapper[4693]: I1008 07:27:25.497047 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2" event={"ID":"2378198d-e3ee-4cdc-a298-0d386fdf78ae","Type":"ContainerDied","Data":"d74109dfc339402d78ea714785d23d6682a7e53d47535e5f56c96c8de734948a"}
Oct 08 07:27:25 crc kubenswrapper[4693]: I1008 07:27:25.497097 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d74109dfc339402d78ea714785d23d6682a7e53d47535e5f56c96c8de734948a"
Oct 08 07:27:25 crc kubenswrapper[4693]: I1008 07:27:25.497227 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.465648 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-48qbg"]
Oct 08 07:27:27 crc kubenswrapper[4693]: E1008 07:27:27.465930 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2378198d-e3ee-4cdc-a298-0d386fdf78ae" containerName="pull"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.465948 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="2378198d-e3ee-4cdc-a298-0d386fdf78ae" containerName="pull"
Oct 08 07:27:27 crc kubenswrapper[4693]: E1008 07:27:27.465968 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2378198d-e3ee-4cdc-a298-0d386fdf78ae" containerName="extract"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.465979 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="2378198d-e3ee-4cdc-a298-0d386fdf78ae" containerName="extract"
Oct 08 07:27:27 crc kubenswrapper[4693]: E1008 07:27:27.466001 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2378198d-e3ee-4cdc-a298-0d386fdf78ae" containerName="util"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.466014 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="2378198d-e3ee-4cdc-a298-0d386fdf78ae" containerName="util"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.466158 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="2378198d-e3ee-4cdc-a298-0d386fdf78ae" containerName="extract"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.466705 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-48qbg"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.468689 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-4jj2z"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.469438 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.469480 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.488003 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-48qbg"]
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.576127 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzg6z\" (UniqueName: \"kubernetes.io/projected/55f815ee-eb64-4f69-b192-081c71664f3b-kube-api-access-hzg6z\") pod \"nmstate-operator-858ddd8f98-48qbg\" (UID: \"55f815ee-eb64-4f69-b192-081c71664f3b\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-48qbg"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.678218 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzg6z\" (UniqueName: \"kubernetes.io/projected/55f815ee-eb64-4f69-b192-081c71664f3b-kube-api-access-hzg6z\") pod \"nmstate-operator-858ddd8f98-48qbg\" (UID: \"55f815ee-eb64-4f69-b192-081c71664f3b\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-48qbg"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.711667 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzg6z\" (UniqueName: \"kubernetes.io/projected/55f815ee-eb64-4f69-b192-081c71664f3b-kube-api-access-hzg6z\") pod \"nmstate-operator-858ddd8f98-48qbg\" (UID: \"55f815ee-eb64-4f69-b192-081c71664f3b\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-48qbg"
Oct 08 07:27:27 crc kubenswrapper[4693]: I1008 07:27:27.785920 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-48qbg"
Oct 08 07:27:28 crc kubenswrapper[4693]: I1008 07:27:28.042943 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-48qbg"]
Oct 08 07:27:28 crc kubenswrapper[4693]: I1008 07:27:28.519373 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-48qbg" event={"ID":"55f815ee-eb64-4f69-b192-081c71664f3b","Type":"ContainerStarted","Data":"3b578598e71fbf3816aab4b62a2cd58209658affcab778dd71c8d32cb056ee28"}
Oct 08 07:27:33 crc kubenswrapper[4693]: I1008 07:27:33.556622 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-48qbg" event={"ID":"55f815ee-eb64-4f69-b192-081c71664f3b","Type":"ContainerStarted","Data":"578fa9c10c3a1dc07ca403b8d50e9596cd3f95967baf5cb8f302de96467a2fc3"}
Oct 08 07:27:33 crc kubenswrapper[4693]: I1008 07:27:33.585638 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-48qbg" podStartSLOduration=2.005638385 podStartE2EDuration="6.585605547s" podCreationTimestamp="2025-10-08 07:27:27 +0000 UTC" firstStartedPulling="2025-10-08 07:27:28.057015465 +0000 UTC m=+633.427980410" lastFinishedPulling="2025-10-08 07:27:32.636982597 +0000 UTC m=+638.007947572" observedRunningTime="2025-10-08 07:27:33.582699839 +0000 UTC m=+638.953664884" watchObservedRunningTime="2025-10-08 07:27:33.585605547 +0000 UTC m=+638.956570522"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.685703 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-945gn"]
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.687052 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-945gn"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.689479 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-ztzlm"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.705298 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-945gn"]
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.726972 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"]
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.728406 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.744035 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.753915 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-j9lkx"]
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.756725 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.777846 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"]
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.836084 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"]
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.836761 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.841471 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.841775 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-zrftk"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.841954 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.849748 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"]
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.886455 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/69abf05d-a12d-4255-a1cf-a57efdc57a93-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-fpzdq\" (UID: \"69abf05d-a12d-4255-a1cf-a57efdc57a93\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.886529 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fch29\" (UniqueName: \"kubernetes.io/projected/69abf05d-a12d-4255-a1cf-a57efdc57a93-kube-api-access-fch29\") pod \"nmstate-webhook-6cdbc54649-fpzdq\" (UID: \"69abf05d-a12d-4255-a1cf-a57efdc57a93\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.886607 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5ad24461-fc5f-44fd-94e2-68b8ef30e152-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-j658k\" (UID: \"5ad24461-fc5f-44fd-94e2-68b8ef30e152\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.886677 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a28c017b-9170-4749-80cf-60b85681a4e7-dbus-socket\") pod \"nmstate-handler-j9lkx\" (UID: \"a28c017b-9170-4749-80cf-60b85681a4e7\") " pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.886709 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a28c017b-9170-4749-80cf-60b85681a4e7-nmstate-lock\") pod \"nmstate-handler-j9lkx\" (UID: \"a28c017b-9170-4749-80cf-60b85681a4e7\") " pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.886775 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a28c017b-9170-4749-80cf-60b85681a4e7-ovs-socket\") pod \"nmstate-handler-j9lkx\" (UID: \"a28c017b-9170-4749-80cf-60b85681a4e7\") " pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.886844 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5ad24461-fc5f-44fd-94e2-68b8ef30e152-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-j658k\" (UID: \"5ad24461-fc5f-44fd-94e2-68b8ef30e152\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.886898 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwlqq\" (UniqueName: \"kubernetes.io/projected/ae5b67a9-cb0d-4f73-8353-2bba4708a176-kube-api-access-xwlqq\") pod \"nmstate-metrics-fdff9cb8d-945gn\" (UID: \"ae5b67a9-cb0d-4f73-8353-2bba4708a176\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-945gn"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.886945 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlrzz\" (UniqueName: \"kubernetes.io/projected/5ad24461-fc5f-44fd-94e2-68b8ef30e152-kube-api-access-jlrzz\") pod \"nmstate-console-plugin-6b874cbd85-j658k\" (UID: \"5ad24461-fc5f-44fd-94e2-68b8ef30e152\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.887034 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tc2gf\" (UniqueName: \"kubernetes.io/projected/a28c017b-9170-4749-80cf-60b85681a4e7-kube-api-access-tc2gf\") pod \"nmstate-handler-j9lkx\" (UID: \"a28c017b-9170-4749-80cf-60b85681a4e7\") " pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.989559 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5ad24461-fc5f-44fd-94e2-68b8ef30e152-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-j658k\" (UID: \"5ad24461-fc5f-44fd-94e2-68b8ef30e152\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.989617 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a28c017b-9170-4749-80cf-60b85681a4e7-nmstate-lock\") pod \"nmstate-handler-j9lkx\" (UID: \"a28c017b-9170-4749-80cf-60b85681a4e7\") " pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.989634 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a28c017b-9170-4749-80cf-60b85681a4e7-dbus-socket\") pod \"nmstate-handler-j9lkx\" (UID: \"a28c017b-9170-4749-80cf-60b85681a4e7\") " pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.989660 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a28c017b-9170-4749-80cf-60b85681a4e7-ovs-socket\") pod \"nmstate-handler-j9lkx\" (UID: \"a28c017b-9170-4749-80cf-60b85681a4e7\") " pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.989684 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5ad24461-fc5f-44fd-94e2-68b8ef30e152-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-j658k\" (UID: \"5ad24461-fc5f-44fd-94e2-68b8ef30e152\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.989707 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwlqq\" (UniqueName: \"kubernetes.io/projected/ae5b67a9-cb0d-4f73-8353-2bba4708a176-kube-api-access-xwlqq\") pod \"nmstate-metrics-fdff9cb8d-945gn\" (UID: \"ae5b67a9-cb0d-4f73-8353-2bba4708a176\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-945gn"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.989719 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a28c017b-9170-4749-80cf-60b85681a4e7-nmstate-lock\") pod \"nmstate-handler-j9lkx\" (UID: \"a28c017b-9170-4749-80cf-60b85681a4e7\") " pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.989727 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlrzz\" (UniqueName: \"kubernetes.io/projected/5ad24461-fc5f-44fd-94e2-68b8ef30e152-kube-api-access-jlrzz\") pod \"nmstate-console-plugin-6b874cbd85-j658k\" (UID: \"5ad24461-fc5f-44fd-94e2-68b8ef30e152\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.989979 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a28c017b-9170-4749-80cf-60b85681a4e7-dbus-socket\") pod \"nmstate-handler-j9lkx\" (UID: \"a28c017b-9170-4749-80cf-60b85681a4e7\") " pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.990004 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a28c017b-9170-4749-80cf-60b85681a4e7-ovs-socket\") pod \"nmstate-handler-j9lkx\" (UID: \"a28c017b-9170-4749-80cf-60b85681a4e7\") " pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.990012 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tc2gf\" (UniqueName: \"kubernetes.io/projected/a28c017b-9170-4749-80cf-60b85681a4e7-kube-api-access-tc2gf\") pod \"nmstate-handler-j9lkx\" (UID: \"a28c017b-9170-4749-80cf-60b85681a4e7\") " pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.990032 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/69abf05d-a12d-4255-a1cf-a57efdc57a93-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-fpzdq\" (UID: \"69abf05d-a12d-4255-a1cf-a57efdc57a93\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.990048 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fch29\" (UniqueName: \"kubernetes.io/projected/69abf05d-a12d-4255-a1cf-a57efdc57a93-kube-api-access-fch29\") pod \"nmstate-webhook-6cdbc54649-fpzdq\" (UID: \"69abf05d-a12d-4255-a1cf-a57efdc57a93\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.990897 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5ad24461-fc5f-44fd-94e2-68b8ef30e152-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-j658k\" (UID: \"5ad24461-fc5f-44fd-94e2-68b8ef30e152\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"
Oct 08 07:27:34 crc kubenswrapper[4693]: I1008 07:27:34.996171 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/69abf05d-a12d-4255-a1cf-a57efdc57a93-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-fpzdq\" (UID: \"69abf05d-a12d-4255-a1cf-a57efdc57a93\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.000899 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5ad24461-fc5f-44fd-94e2-68b8ef30e152-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-j658k\" (UID: \"5ad24461-fc5f-44fd-94e2-68b8ef30e152\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.016633 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlrzz\" (UniqueName: \"kubernetes.io/projected/5ad24461-fc5f-44fd-94e2-68b8ef30e152-kube-api-access-jlrzz\") pod \"nmstate-console-plugin-6b874cbd85-j658k\" (UID: \"5ad24461-fc5f-44fd-94e2-68b8ef30e152\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.029602 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5f85d897fd-z69cs"]
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.030208 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.032696 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fch29\" (UniqueName: \"kubernetes.io/projected/69abf05d-a12d-4255-a1cf-a57efdc57a93-kube-api-access-fch29\") pod \"nmstate-webhook-6cdbc54649-fpzdq\" (UID: \"69abf05d-a12d-4255-a1cf-a57efdc57a93\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.036462 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwlqq\" (UniqueName: \"kubernetes.io/projected/ae5b67a9-cb0d-4f73-8353-2bba4708a176-kube-api-access-xwlqq\") pod \"nmstate-metrics-fdff9cb8d-945gn\" (UID: \"ae5b67a9-cb0d-4f73-8353-2bba4708a176\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-945gn"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.046362 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5f85d897fd-z69cs"]
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.047602 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tc2gf\" (UniqueName: \"kubernetes.io/projected/a28c017b-9170-4749-80cf-60b85681a4e7-kube-api-access-tc2gf\") pod \"nmstate-handler-j9lkx\" (UID: \"a28c017b-9170-4749-80cf-60b85681a4e7\") " pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.066732 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.078058 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.091359 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/673439ec-9f54-43f5-8985-486239ab2b6e-service-ca\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.091444 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/673439ec-9f54-43f5-8985-486239ab2b6e-console-config\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.091465 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggtj9\" (UniqueName: \"kubernetes.io/projected/673439ec-9f54-43f5-8985-486239ab2b6e-kube-api-access-ggtj9\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.091486 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/673439ec-9f54-43f5-8985-486239ab2b6e-trusted-ca-bundle\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.091504 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/673439ec-9f54-43f5-8985-486239ab2b6e-console-serving-cert\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.091527 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/673439ec-9f54-43f5-8985-486239ab2b6e-console-oauth-config\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.091552 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/673439ec-9f54-43f5-8985-486239ab2b6e-oauth-serving-cert\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.149092 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.192778 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/673439ec-9f54-43f5-8985-486239ab2b6e-service-ca\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.193076 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/673439ec-9f54-43f5-8985-486239ab2b6e-console-config\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.193094 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggtj9\" (UniqueName: \"kubernetes.io/projected/673439ec-9f54-43f5-8985-486239ab2b6e-kube-api-access-ggtj9\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.193114 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/673439ec-9f54-43f5-8985-486239ab2b6e-trusted-ca-bundle\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.193133 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/673439ec-9f54-43f5-8985-486239ab2b6e-console-serving-cert\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.193157 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/673439ec-9f54-43f5-8985-486239ab2b6e-console-oauth-config\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.193178 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/673439ec-9f54-43f5-8985-486239ab2b6e-oauth-serving-cert\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.193949 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/673439ec-9f54-43f5-8985-486239ab2b6e-oauth-serving-cert\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.195015 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/673439ec-9f54-43f5-8985-486239ab2b6e-console-config\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.195254 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/673439ec-9f54-43f5-8985-486239ab2b6e-service-ca\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.195265 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/673439ec-9f54-43f5-8985-486239ab2b6e-trusted-ca-bundle\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.198370 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/673439ec-9f54-43f5-8985-486239ab2b6e-console-oauth-config\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.198395 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/673439ec-9f54-43f5-8985-486239ab2b6e-console-serving-cert\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.207887 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggtj9\" (UniqueName: \"kubernetes.io/projected/673439ec-9f54-43f5-8985-486239ab2b6e-kube-api-access-ggtj9\") pod \"console-5f85d897fd-z69cs\" (UID: \"673439ec-9f54-43f5-8985-486239ab2b6e\") " pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.269004 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"]
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.308721 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-945gn"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.316143 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k"]
Oct 08 07:27:35 crc kubenswrapper[4693]: W1008 07:27:35.330792 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ad24461_fc5f_44fd_94e2_68b8ef30e152.slice/crio-cef54fd5b32d6754f0c44a753503e5f8c3a27004e4d84a2886e4d9a48f80759b WatchSource:0}: Error finding container cef54fd5b32d6754f0c44a753503e5f8c3a27004e4d84a2886e4d9a48f80759b: Status 404 returned error can't find the container with id cef54fd5b32d6754f0c44a753503e5f8c3a27004e4d84a2886e4d9a48f80759b
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.389889 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.539205 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-945gn"]
Oct 08 07:27:35 crc kubenswrapper[4693]: W1008 07:27:35.540302 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae5b67a9_cb0d_4f73_8353_2bba4708a176.slice/crio-75a6f5bfca029e8405adf87c13d92d61618f0efdf68e811ff5e67da58bc73cfa WatchSource:0}: Error finding container 75a6f5bfca029e8405adf87c13d92d61618f0efdf68e811ff5e67da58bc73cfa: Status 404 returned error can't find the container with id 75a6f5bfca029e8405adf87c13d92d61618f0efdf68e811ff5e67da58bc73cfa
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.569492 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-j9lkx" event={"ID":"a28c017b-9170-4749-80cf-60b85681a4e7","Type":"ContainerStarted","Data":"5ff7ff46ff637c084d4009c4cbdbf1b1dcce726a595871ab6d639112f79de0b8"}
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.574050 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-945gn" event={"ID":"ae5b67a9-cb0d-4f73-8353-2bba4708a176","Type":"ContainerStarted","Data":"75a6f5bfca029e8405adf87c13d92d61618f0efdf68e811ff5e67da58bc73cfa"}
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.575154 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq" event={"ID":"69abf05d-a12d-4255-a1cf-a57efdc57a93","Type":"ContainerStarted","Data":"0ee092514f8d4602baf96c194986d0e0b72cf7280a2986cff209a34dafb9fc1b"}
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.576072 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k" event={"ID":"5ad24461-fc5f-44fd-94e2-68b8ef30e152","Type":"ContainerStarted","Data":"cef54fd5b32d6754f0c44a753503e5f8c3a27004e4d84a2886e4d9a48f80759b"}
Oct 08 07:27:35 crc kubenswrapper[4693]: I1008 07:27:35.622753 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5f85d897fd-z69cs"]
Oct 08 07:27:36 crc kubenswrapper[4693]: I1008 07:27:36.583960 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5f85d897fd-z69cs" event={"ID":"673439ec-9f54-43f5-8985-486239ab2b6e","Type":"ContainerStarted","Data":"83aec69a937639f82145ec282f440e192a3c585942ec9ff213aa3452e9659e1e"}
Oct 08 07:27:36 crc kubenswrapper[4693]: I1008 07:27:36.584195 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5f85d897fd-z69cs" event={"ID":"673439ec-9f54-43f5-8985-486239ab2b6e","Type":"ContainerStarted","Data":"22812f48e0fde7ec094f2ffd9a0f0f5bb2b6154a548a5169d6f1d43801fc8388"}
Oct 08 07:27:36 crc kubenswrapper[4693]: I1008 07:27:36.613196 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5f85d897fd-z69cs" podStartSLOduration=2.613180931 podStartE2EDuration="2.613180931s" podCreationTimestamp="2025-10-08 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:27:36.608996799 +0000 UTC m=+641.979961744" watchObservedRunningTime="2025-10-08 07:27:36.613180931 +0000 UTC m=+641.984145866"
Oct 08 07:27:38 crc kubenswrapper[4693]: I1008 07:27:38.599269 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k" event={"ID":"5ad24461-fc5f-44fd-94e2-68b8ef30e152","Type":"ContainerStarted","Data":"47b97c280a5562971b8e97ba53243463eb73ec038a05fe69b423ac81a506fe60"}
Oct 08 07:27:38 crc kubenswrapper[4693]: I1008 07:27:38.601414 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-j9lkx" event={"ID":"a28c017b-9170-4749-80cf-60b85681a4e7","Type":"ContainerStarted","Data":"41687d31eb2fb04059ab8bdde8a51c72447a30540f8729697acecd4e5e88d421"}
Oct 08 07:27:38 crc kubenswrapper[4693]: I1008 07:27:38.601664 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:38 crc kubenswrapper[4693]: I1008 07:27:38.603633 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-945gn" event={"ID":"ae5b67a9-cb0d-4f73-8353-2bba4708a176","Type":"ContainerStarted","Data":"52b4108d0931c8570f55d4143176a489ac4ed4fe1289e12f70cfaa459a86e087"}
Oct 08 07:27:38 crc kubenswrapper[4693]: I1008 07:27:38.609662 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq" event={"ID":"69abf05d-a12d-4255-a1cf-a57efdc57a93","Type":"ContainerStarted","Data":"ef2afdc22725efb0f1720236147b3ffbe9339a8db56e32f424da7bc6ca39deb0"}
Oct 08 07:27:38 crc kubenswrapper[4693]: I1008 07:27:38.610360 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"
Oct 08 07:27:38 crc kubenswrapper[4693]: I1008 07:27:38.620269 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-j658k" podStartSLOduration=2.029666478 podStartE2EDuration="4.620248537s" podCreationTimestamp="2025-10-08 07:27:34 +0000 UTC" firstStartedPulling="2025-10-08 07:27:35.333795523 +0000 UTC m=+640.704760458" lastFinishedPulling="2025-10-08 07:27:37.924377572 +0000 UTC m=+643.295342517" observedRunningTime="2025-10-08 07:27:38.61809177 +0000 UTC m=+643.989056745" watchObservedRunningTime="2025-10-08 07:27:38.620248537 +0000 UTC m=+643.991213512"
Oct 08 07:27:38 crc kubenswrapper[4693]: I1008 07:27:38.679495 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq" podStartSLOduration=2.0411524070000002 podStartE2EDuration="4.679469525s" podCreationTimestamp="2025-10-08 07:27:34 +0000 UTC" firstStartedPulling="2025-10-08 07:27:35.288122219 +0000 UTC m=+640.659087154" lastFinishedPulling="2025-10-08 07:27:37.926439337 +0000 UTC m=+643.297404272" observedRunningTime="2025-10-08 07:27:38.669109207 +0000 UTC m=+644.040074182" watchObservedRunningTime="2025-10-08 07:27:38.679469525 +0000 UTC m=+644.050434490"
Oct 08 07:27:38 crc kubenswrapper[4693]: I1008 07:27:38.684733 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-j9lkx" podStartSLOduration=1.882591856 podStartE2EDuration="4.684707505s" podCreationTimestamp="2025-10-08 07:27:34 +0000 UTC" firstStartedPulling="2025-10-08 07:27:35.122235252 +0000 UTC m=+640.493200187" lastFinishedPulling="2025-10-08 07:27:37.924350901 +0000 UTC m=+643.295315836" observedRunningTime="2025-10-08 07:27:38.645041192 +0000 UTC m=+644.016006167" watchObservedRunningTime="2025-10-08 07:27:38.684707505 +0000 UTC m=+644.055672480"
Oct 08 07:27:41 crc kubenswrapper[4693]: I1008 07:27:41.645381 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-945gn" event={"ID":"ae5b67a9-cb0d-4f73-8353-2bba4708a176","Type":"ContainerStarted","Data":"e472d6317c24adee0565825a47609a6267fc7616a2c2d753885123125cd5893f"}
Oct 08 07:27:41 crc kubenswrapper[4693]: I1008 07:27:41.676056 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-945gn" podStartSLOduration=2.712150545 podStartE2EDuration="7.675965216s" podCreationTimestamp="2025-10-08 07:27:34 +0000 UTC" firstStartedPulling="2025-10-08 07:27:35.543551756 +0000 UTC m=+640.914516681" lastFinishedPulling="2025-10-08 07:27:40.507366417 +0000 UTC m=+645.878331352" observedRunningTime="2025-10-08 07:27:41.669292897 +0000 UTC m=+647.040257832" watchObservedRunningTime="2025-10-08 07:27:41.675965216 +0000 UTC m=+647.046930181"
Oct 08 07:27:45 crc kubenswrapper[4693]: I1008 07:27:45.114168 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-j9lkx"
Oct 08 07:27:45 crc kubenswrapper[4693]: I1008 07:27:45.390101 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:45 crc kubenswrapper[4693]: I1008 07:27:45.390183 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:45 crc kubenswrapper[4693]: I1008 07:27:45.398762 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:45 crc kubenswrapper[4693]: I1008 07:27:45.686906 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5f85d897fd-z69cs"
Oct 08 07:27:45 crc kubenswrapper[4693]: I1008 07:27:45.744171 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-x7lvf"]
Oct 08 07:27:55 crc kubenswrapper[4693]: I1008 07:27:55.075535 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-fpzdq"
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.605040 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"]
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.606635 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.610141 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.618865 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"]
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.624468 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54xsf\" (UniqueName: \"kubernetes.io/projected/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-kube-api-access-54xsf\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j\" (UID: \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.624511 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j\" (UID: \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.624557 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j\" (UID: \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.725206 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54xsf\" (UniqueName: \"kubernetes.io/projected/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-kube-api-access-54xsf\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j\" (UID: \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.725256 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j\" (UID: \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.725306 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j\" (UID: \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.726034 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j\" (UID: \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.726188 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j\" (UID: \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.747429 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54xsf\" (UniqueName: \"kubernetes.io/projected/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-kube-api-access-54xsf\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j\" (UID: \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.793367 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-x7lvf" podUID="c90da226-e34a-4d72-a64d-132a45439e4d" containerName="console" containerID="cri-o://330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6" gracePeriod=15
Oct 08 07:28:10 crc kubenswrapper[4693]: I1008 07:28:10.922294 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.198335 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-x7lvf_c90da226-e34a-4d72-a64d-132a45439e4d/console/0.log"
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.198625 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-x7lvf"
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.213526 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j"]
Oct 08 07:28:11 crc kubenswrapper[4693]: W1008 07:28:11.223493 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0fe86fd_7ab1_4654_a5b7_e4797e7b12d9.slice/crio-277dec26b33441ca96ee50f9f911bba9be43af92e4cbf0dd4a1e66b89a1d20ad WatchSource:0}: Error finding container 277dec26b33441ca96ee50f9f911bba9be43af92e4cbf0dd4a1e66b89a1d20ad: Status 404 returned error can't find the container with id 277dec26b33441ca96ee50f9f911bba9be43af92e4cbf0dd4a1e66b89a1d20ad
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.231676 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c90da226-e34a-4d72-a64d-132a45439e4d-console-oauth-config\") pod \"c90da226-e34a-4d72-a64d-132a45439e4d\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") "
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.231721 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7768\" (UniqueName: \"kubernetes.io/projected/c90da226-e34a-4d72-a64d-132a45439e4d-kube-api-access-t7768\") pod \"c90da226-e34a-4d72-a64d-132a45439e4d\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") "
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.231767 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-console-config\") pod \"c90da226-e34a-4d72-a64d-132a45439e4d\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") "
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.231851 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c90da226-e34a-4d72-a64d-132a45439e4d-console-serving-cert\") pod \"c90da226-e34a-4d72-a64d-132a45439e4d\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") "
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.231938 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-service-ca\") pod \"c90da226-e34a-4d72-a64d-132a45439e4d\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") "
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.231968 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-trusted-ca-bundle\") pod \"c90da226-e34a-4d72-a64d-132a45439e4d\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") "
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.232006 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-oauth-serving-cert\") pod \"c90da226-e34a-4d72-a64d-132a45439e4d\" (UID: \"c90da226-e34a-4d72-a64d-132a45439e4d\") "
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.233110 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-console-config" (OuterVolumeSpecName: "console-config") pod "c90da226-e34a-4d72-a64d-132a45439e4d" (UID: "c90da226-e34a-4d72-a64d-132a45439e4d"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.233291 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "c90da226-e34a-4d72-a64d-132a45439e4d" (UID: "c90da226-e34a-4d72-a64d-132a45439e4d"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.233705 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-service-ca" (OuterVolumeSpecName: "service-ca") pod "c90da226-e34a-4d72-a64d-132a45439e4d" (UID: "c90da226-e34a-4d72-a64d-132a45439e4d"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.233895 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "c90da226-e34a-4d72-a64d-132a45439e4d" (UID: "c90da226-e34a-4d72-a64d-132a45439e4d"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.239393 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c90da226-e34a-4d72-a64d-132a45439e4d-kube-api-access-t7768" (OuterVolumeSpecName: "kube-api-access-t7768") pod "c90da226-e34a-4d72-a64d-132a45439e4d" (UID: "c90da226-e34a-4d72-a64d-132a45439e4d"). InnerVolumeSpecName "kube-api-access-t7768". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.239423 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c90da226-e34a-4d72-a64d-132a45439e4d-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "c90da226-e34a-4d72-a64d-132a45439e4d" (UID: "c90da226-e34a-4d72-a64d-132a45439e4d"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.239650 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c90da226-e34a-4d72-a64d-132a45439e4d-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "c90da226-e34a-4d72-a64d-132a45439e4d" (UID: "c90da226-e34a-4d72-a64d-132a45439e4d"). InnerVolumeSpecName "console-oauth-config".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.333193 4693 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c90da226-e34a-4d72-a64d-132a45439e4d-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.333225 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7768\" (UniqueName: \"kubernetes.io/projected/c90da226-e34a-4d72-a64d-132a45439e4d-kube-api-access-t7768\") on node \"crc\" DevicePath \"\"" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.333239 4693 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-console-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.333250 4693 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c90da226-e34a-4d72-a64d-132a45439e4d-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.333262 4693 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-service-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.333273 4693 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.333283 4693 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c90da226-e34a-4d72-a64d-132a45439e4d-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.884387 4693 generic.go:334] "Generic (PLEG): container finished" podID="b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" containerID="cab1ee12ddbbb93981fce3c4b487a59828650ddd11eceea65230234984344fcc" exitCode=0 Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.884471 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j" event={"ID":"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9","Type":"ContainerDied","Data":"cab1ee12ddbbb93981fce3c4b487a59828650ddd11eceea65230234984344fcc"} Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.884979 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j" event={"ID":"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9","Type":"ContainerStarted","Data":"277dec26b33441ca96ee50f9f911bba9be43af92e4cbf0dd4a1e66b89a1d20ad"} Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.891358 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-x7lvf_c90da226-e34a-4d72-a64d-132a45439e4d/console/0.log" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.891451 4693 generic.go:334] "Generic (PLEG): container finished" podID="c90da226-e34a-4d72-a64d-132a45439e4d" containerID="330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6" exitCode=2 Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.891548 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-x7lvf" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.891932 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x7lvf" event={"ID":"c90da226-e34a-4d72-a64d-132a45439e4d","Type":"ContainerDied","Data":"330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6"} Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.892008 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x7lvf" event={"ID":"c90da226-e34a-4d72-a64d-132a45439e4d","Type":"ContainerDied","Data":"984945048f8e776ff956da2e08d166df4c94f62976d0ab2aa8b140bb196a7461"} Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.892095 4693 scope.go:117] "RemoveContainer" containerID="330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.924419 4693 scope.go:117] "RemoveContainer" containerID="330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6" Oct 08 07:28:11 crc kubenswrapper[4693]: E1008 07:28:11.926574 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6\": container with ID starting with 330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6 not found: ID does not exist" containerID="330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.926972 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6"} err="failed to get container status \"330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6\": rpc error: code = NotFound desc = could not find container \"330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6\": container with ID starting with 330e52d8e7f31edebf241f467b20234db56f0d59a6bba2348db8a915a89563e6 not found: ID does not exist" Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.940133 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-x7lvf"] Oct 08 07:28:11 crc kubenswrapper[4693]: I1008 07:28:11.944319 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-x7lvf"] Oct 08 07:28:13 crc kubenswrapper[4693]: I1008 07:28:13.375561 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c90da226-e34a-4d72-a64d-132a45439e4d" path="/var/lib/kubelet/pods/c90da226-e34a-4d72-a64d-132a45439e4d/volumes" Oct 08 07:28:17 crc kubenswrapper[4693]: I1008 07:28:17.936562 4693 generic.go:334] "Generic (PLEG): container finished" podID="b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" containerID="f6179439dad97ccdd7f14e57f20bbe984a0096b6d197514697f1bbe5617fc559" exitCode=0 Oct 08 07:28:17 crc kubenswrapper[4693]: I1008 07:28:17.936646 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j" event={"ID":"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9","Type":"ContainerDied","Data":"f6179439dad97ccdd7f14e57f20bbe984a0096b6d197514697f1bbe5617fc559"} Oct 08 07:28:18 crc kubenswrapper[4693]: I1008 07:28:18.950090 4693 generic.go:334] "Generic (PLEG): container finished" podID="b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" 
containerID="bb3921d61c39e7d4955e47380d3b9a761315ba739d9771fc327c1b3550350469" exitCode=0 Oct 08 07:28:18 crc kubenswrapper[4693]: I1008 07:28:18.950142 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j" event={"ID":"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9","Type":"ContainerDied","Data":"bb3921d61c39e7d4955e47380d3b9a761315ba739d9771fc327c1b3550350469"} Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.305507 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j" Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.465275 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-bundle\") pod \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\" (UID: \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\") " Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.465360 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54xsf\" (UniqueName: \"kubernetes.io/projected/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-kube-api-access-54xsf\") pod \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\" (UID: \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\") " Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.465386 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-util\") pod \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\" (UID: \"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9\") " Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.468467 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-bundle" (OuterVolumeSpecName: "bundle") pod "b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" (UID: "b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.474104 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-kube-api-access-54xsf" (OuterVolumeSpecName: "kube-api-access-54xsf") pod "b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" (UID: "b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9"). InnerVolumeSpecName "kube-api-access-54xsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.475262 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-util" (OuterVolumeSpecName: "util") pod "b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" (UID: "b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.568302 4693 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.568380 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54xsf\" (UniqueName: \"kubernetes.io/projected/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-kube-api-access-54xsf\") on node \"crc\" DevicePath \"\"" Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.568414 4693 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9-util\") on node \"crc\" DevicePath \"\"" Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.968484 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j" event={"ID":"b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9","Type":"ContainerDied","Data":"277dec26b33441ca96ee50f9f911bba9be43af92e4cbf0dd4a1e66b89a1d20ad"} Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.968872 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="277dec26b33441ca96ee50f9f911bba9be43af92e4cbf0dd4a1e66b89a1d20ad" Oct 08 07:28:20 crc kubenswrapper[4693]: I1008 07:28:20.968584 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j" Oct 08 07:28:23 crc kubenswrapper[4693]: I1008 07:28:23.490520 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:28:23 crc kubenswrapper[4693]: I1008 07:28:23.490606 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.928660 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4"] Oct 08 07:28:33 crc kubenswrapper[4693]: E1008 07:28:33.929459 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" containerName="pull" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.929469 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" containerName="pull" Oct 08 07:28:33 crc kubenswrapper[4693]: E1008 07:28:33.929480 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" containerName="extract" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.929486 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" containerName="extract" Oct 08 07:28:33 crc kubenswrapper[4693]: E1008 07:28:33.929497 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" containerName="util" Oct 08 07:28:33 
crc kubenswrapper[4693]: I1008 07:28:33.929502 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" containerName="util" Oct 08 07:28:33 crc kubenswrapper[4693]: E1008 07:28:33.929517 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c90da226-e34a-4d72-a64d-132a45439e4d" containerName="console" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.929522 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="c90da226-e34a-4d72-a64d-132a45439e4d" containerName="console" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.929605 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9" containerName="extract" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.929617 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="c90da226-e34a-4d72-a64d-132a45439e4d" containerName="console" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.930003 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.934171 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.934355 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.934534 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.934736 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-n94nv" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.934889 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 08 07:28:33 crc kubenswrapper[4693]: I1008 07:28:33.956847 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4"] Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.047354 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/20b2cb2b-9d01-44fa-a40e-2375df3a92d7-webhook-cert\") pod \"metallb-operator-controller-manager-6b6d7649c4-6krt4\" (UID: \"20b2cb2b-9d01-44fa-a40e-2375df3a92d7\") " pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.047442 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfs88\" (UniqueName: \"kubernetes.io/projected/20b2cb2b-9d01-44fa-a40e-2375df3a92d7-kube-api-access-nfs88\") pod \"metallb-operator-controller-manager-6b6d7649c4-6krt4\" (UID: \"20b2cb2b-9d01-44fa-a40e-2375df3a92d7\") " pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.047471 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/20b2cb2b-9d01-44fa-a40e-2375df3a92d7-apiservice-cert\") pod \"metallb-operator-controller-manager-6b6d7649c4-6krt4\" (UID: 
\"20b2cb2b-9d01-44fa-a40e-2375df3a92d7\") " pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.148617 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfs88\" (UniqueName: \"kubernetes.io/projected/20b2cb2b-9d01-44fa-a40e-2375df3a92d7-kube-api-access-nfs88\") pod \"metallb-operator-controller-manager-6b6d7649c4-6krt4\" (UID: \"20b2cb2b-9d01-44fa-a40e-2375df3a92d7\") " pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.148741 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/20b2cb2b-9d01-44fa-a40e-2375df3a92d7-apiservice-cert\") pod \"metallb-operator-controller-manager-6b6d7649c4-6krt4\" (UID: \"20b2cb2b-9d01-44fa-a40e-2375df3a92d7\") " pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.148832 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/20b2cb2b-9d01-44fa-a40e-2375df3a92d7-webhook-cert\") pod \"metallb-operator-controller-manager-6b6d7649c4-6krt4\" (UID: \"20b2cb2b-9d01-44fa-a40e-2375df3a92d7\") " pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.155969 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/20b2cb2b-9d01-44fa-a40e-2375df3a92d7-apiservice-cert\") pod \"metallb-operator-controller-manager-6b6d7649c4-6krt4\" (UID: \"20b2cb2b-9d01-44fa-a40e-2375df3a92d7\") " pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.167855 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfs88\" (UniqueName: \"kubernetes.io/projected/20b2cb2b-9d01-44fa-a40e-2375df3a92d7-kube-api-access-nfs88\") pod \"metallb-operator-controller-manager-6b6d7649c4-6krt4\" (UID: \"20b2cb2b-9d01-44fa-a40e-2375df3a92d7\") " pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.170103 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/20b2cb2b-9d01-44fa-a40e-2375df3a92d7-webhook-cert\") pod \"metallb-operator-controller-manager-6b6d7649c4-6krt4\" (UID: \"20b2cb2b-9d01-44fa-a40e-2375df3a92d7\") " pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.191973 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs"] Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.193125 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.194985 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-c7gn8" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.195311 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.195648 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.210769 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs"] Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.249682 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.351353 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9db7ae35-d57f-4342-8a8c-ff3613e28905-apiservice-cert\") pod \"metallb-operator-webhook-server-6559c8fcd-4bbqs\" (UID: \"9db7ae35-d57f-4342-8a8c-ff3613e28905\") " pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.351465 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75kcp\" (UniqueName: \"kubernetes.io/projected/9db7ae35-d57f-4342-8a8c-ff3613e28905-kube-api-access-75kcp\") pod \"metallb-operator-webhook-server-6559c8fcd-4bbqs\" (UID: \"9db7ae35-d57f-4342-8a8c-ff3613e28905\") " pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.351535 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9db7ae35-d57f-4342-8a8c-ff3613e28905-webhook-cert\") pod \"metallb-operator-webhook-server-6559c8fcd-4bbqs\" (UID: \"9db7ae35-d57f-4342-8a8c-ff3613e28905\") " pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.452461 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75kcp\" (UniqueName: \"kubernetes.io/projected/9db7ae35-d57f-4342-8a8c-ff3613e28905-kube-api-access-75kcp\") pod \"metallb-operator-webhook-server-6559c8fcd-4bbqs\" (UID: \"9db7ae35-d57f-4342-8a8c-ff3613e28905\") " pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.452531 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9db7ae35-d57f-4342-8a8c-ff3613e28905-webhook-cert\") pod \"metallb-operator-webhook-server-6559c8fcd-4bbqs\" (UID: \"9db7ae35-d57f-4342-8a8c-ff3613e28905\") " pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.452556 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9db7ae35-d57f-4342-8a8c-ff3613e28905-apiservice-cert\") pod 
\"metallb-operator-webhook-server-6559c8fcd-4bbqs\" (UID: \"9db7ae35-d57f-4342-8a8c-ff3613e28905\") " pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.466051 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4"] Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.467636 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9db7ae35-d57f-4342-8a8c-ff3613e28905-apiservice-cert\") pod \"metallb-operator-webhook-server-6559c8fcd-4bbqs\" (UID: \"9db7ae35-d57f-4342-8a8c-ff3613e28905\") " pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.476249 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9db7ae35-d57f-4342-8a8c-ff3613e28905-webhook-cert\") pod \"metallb-operator-webhook-server-6559c8fcd-4bbqs\" (UID: \"9db7ae35-d57f-4342-8a8c-ff3613e28905\") " pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.479940 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75kcp\" (UniqueName: \"kubernetes.io/projected/9db7ae35-d57f-4342-8a8c-ff3613e28905-kube-api-access-75kcp\") pod \"metallb-operator-webhook-server-6559c8fcd-4bbqs\" (UID: \"9db7ae35-d57f-4342-8a8c-ff3613e28905\") " pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.559281 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:28:34 crc kubenswrapper[4693]: I1008 07:28:34.765432 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs"] Oct 08 07:28:34 crc kubenswrapper[4693]: W1008 07:28:34.781301 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9db7ae35_d57f_4342_8a8c_ff3613e28905.slice/crio-148cd5a1aa4226c51c67df114d834bfa52c547830d2d55b86c1cb311e9740631 WatchSource:0}: Error finding container 148cd5a1aa4226c51c67df114d834bfa52c547830d2d55b86c1cb311e9740631: Status 404 returned error can't find the container with id 148cd5a1aa4226c51c67df114d834bfa52c547830d2d55b86c1cb311e9740631 Oct 08 07:28:35 crc kubenswrapper[4693]: I1008 07:28:35.057323 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" event={"ID":"20b2cb2b-9d01-44fa-a40e-2375df3a92d7","Type":"ContainerStarted","Data":"12f10953b5acf08c7775fd6b21f4d617792cd64988882e62d7449b08323ae8b0"} Oct 08 07:28:35 crc kubenswrapper[4693]: I1008 07:28:35.059219 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" event={"ID":"9db7ae35-d57f-4342-8a8c-ff3613e28905","Type":"ContainerStarted","Data":"148cd5a1aa4226c51c67df114d834bfa52c547830d2d55b86c1cb311e9740631"} Oct 08 07:28:40 crc kubenswrapper[4693]: I1008 07:28:40.094383 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" 
event={"ID":"9db7ae35-d57f-4342-8a8c-ff3613e28905","Type":"ContainerStarted","Data":"6f9799c90b8264a3c63bc92ee0cf2ec6a4ab2c11679790ffae0e338ed98e3ef1"} Oct 08 07:28:40 crc kubenswrapper[4693]: I1008 07:28:40.096262 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:28:40 crc kubenswrapper[4693]: I1008 07:28:40.098092 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" event={"ID":"20b2cb2b-9d01-44fa-a40e-2375df3a92d7","Type":"ContainerStarted","Data":"62e5e84dba33aac79fab3f97b16fb92197427550046540b8ba59f8b409b4e84e"} Oct 08 07:28:40 crc kubenswrapper[4693]: I1008 07:28:40.098806 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:28:40 crc kubenswrapper[4693]: I1008 07:28:40.119421 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" podStartSLOduration=1.732753226 podStartE2EDuration="6.119400516s" podCreationTimestamp="2025-10-08 07:28:34 +0000 UTC" firstStartedPulling="2025-10-08 07:28:34.784154847 +0000 UTC m=+700.155119782" lastFinishedPulling="2025-10-08 07:28:39.170802097 +0000 UTC m=+704.541767072" observedRunningTime="2025-10-08 07:28:40.11915894 +0000 UTC m=+705.490123905" watchObservedRunningTime="2025-10-08 07:28:40.119400516 +0000 UTC m=+705.490365461" Oct 08 07:28:40 crc kubenswrapper[4693]: I1008 07:28:40.151263 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" podStartSLOduration=2.511828773 podStartE2EDuration="7.151231885s" podCreationTimestamp="2025-10-08 07:28:33 +0000 UTC" firstStartedPulling="2025-10-08 07:28:34.480963917 +0000 UTC m=+699.851928852" lastFinishedPulling="2025-10-08 07:28:39.120367029 +0000 UTC m=+704.491331964" observedRunningTime="2025-10-08 07:28:40.149653728 +0000 UTC m=+705.520618663" watchObservedRunningTime="2025-10-08 07:28:40.151231885 +0000 UTC m=+705.522196860" Oct 08 07:28:53 crc kubenswrapper[4693]: I1008 07:28:53.489529 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:28:53 crc kubenswrapper[4693]: I1008 07:28:53.490204 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:28:54 crc kubenswrapper[4693]: I1008 07:28:54.569966 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6559c8fcd-4bbqs" Oct 08 07:29:14 crc kubenswrapper[4693]: I1008 07:29:14.252622 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-6b6d7649c4-6krt4" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.186049 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-jglgg"] Oct 08 07:29:15 crc kubenswrapper[4693]: 
I1008 07:29:15.190572 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.191409 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-4857d"] Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.193227 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.194250 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-jpz22" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.194738 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.195086 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.201127 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.205078 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-4857d"] Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.279415 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-tjkwg"] Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.280459 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-tjkwg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.281724 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.282199 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-hrc94" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.282499 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.283319 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.287077 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-nj6r5"] Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.287944 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.289006 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.299485 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-nj6r5"] Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.324358 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5prlf\" (UniqueName: \"kubernetes.io/projected/a7633c77-af8c-4789-88d5-fbbb01f3e751-kube-api-access-5prlf\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.324418 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b17ddd86-5c6e-4898-a859-ce43f604fc10-cert\") pod \"frr-k8s-webhook-server-64bf5d555-4857d\" (UID: \"b17ddd86-5c6e-4898-a859-ce43f604fc10\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.324445 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a7633c77-af8c-4789-88d5-fbbb01f3e751-metrics-certs\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.324491 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a7633c77-af8c-4789-88d5-fbbb01f3e751-frr-conf\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.324735 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a7633c77-af8c-4789-88d5-fbbb01f3e751-reloader\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.324797 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a7633c77-af8c-4789-88d5-fbbb01f3e751-frr-sockets\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.324840 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnkh6\" (UniqueName: \"kubernetes.io/projected/b17ddd86-5c6e-4898-a859-ce43f604fc10-kube-api-access-gnkh6\") pod \"frr-k8s-webhook-server-64bf5d555-4857d\" (UID: \"b17ddd86-5c6e-4898-a859-ce43f604fc10\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.324908 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a7633c77-af8c-4789-88d5-fbbb01f3e751-metrics\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc 
kubenswrapper[4693]: I1008 07:29:15.324958 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a7633c77-af8c-4789-88d5-fbbb01f3e751-frr-startup\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427499 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b17ddd86-5c6e-4898-a859-ce43f604fc10-cert\") pod \"frr-k8s-webhook-server-64bf5d555-4857d\" (UID: \"b17ddd86-5c6e-4898-a859-ce43f604fc10\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427547 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a7633c77-af8c-4789-88d5-fbbb01f3e751-metrics-certs\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427592 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/88513bc0-a703-458f-a001-d6a636023c45-metallb-excludel2\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427615 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a7633c77-af8c-4789-88d5-fbbb01f3e751-reloader\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427629 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a7633c77-af8c-4789-88d5-fbbb01f3e751-frr-conf\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427649 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a7633c77-af8c-4789-88d5-fbbb01f3e751-frr-sockets\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427668 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnkh6\" (UniqueName: \"kubernetes.io/projected/b17ddd86-5c6e-4898-a859-ce43f604fc10-kube-api-access-gnkh6\") pod \"frr-k8s-webhook-server-64bf5d555-4857d\" (UID: \"b17ddd86-5c6e-4898-a859-ce43f604fc10\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427696 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88513bc0-a703-458f-a001-d6a636023c45-metrics-certs\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427715 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: 
\"kubernetes.io/empty-dir/a7633c77-af8c-4789-88d5-fbbb01f3e751-metrics\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427731 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgvrr\" (UniqueName: \"kubernetes.io/projected/99056ede-9949-4966-a265-fc3af4134013-kube-api-access-cgvrr\") pod \"controller-68d546b9d8-nj6r5\" (UID: \"99056ede-9949-4966-a265-fc3af4134013\") " pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427770 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/88513bc0-a703-458f-a001-d6a636023c45-memberlist\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427792 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a7633c77-af8c-4789-88d5-fbbb01f3e751-frr-startup\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427820 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/99056ede-9949-4966-a265-fc3af4134013-cert\") pod \"controller-68d546b9d8-nj6r5\" (UID: \"99056ede-9949-4966-a265-fc3af4134013\") " pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427845 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbw74\" (UniqueName: \"kubernetes.io/projected/88513bc0-a703-458f-a001-d6a636023c45-kube-api-access-dbw74\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427862 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5prlf\" (UniqueName: \"kubernetes.io/projected/a7633c77-af8c-4789-88d5-fbbb01f3e751-kube-api-access-5prlf\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.427877 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99056ede-9949-4966-a265-fc3af4134013-metrics-certs\") pod \"controller-68d546b9d8-nj6r5\" (UID: \"99056ede-9949-4966-a265-fc3af4134013\") " pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.428212 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a7633c77-af8c-4789-88d5-fbbb01f3e751-frr-sockets\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.428447 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a7633c77-af8c-4789-88d5-fbbb01f3e751-metrics\") pod \"frr-k8s-jglgg\" (UID: 
\"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.429368 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a7633c77-af8c-4789-88d5-fbbb01f3e751-frr-startup\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.430467 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a7633c77-af8c-4789-88d5-fbbb01f3e751-reloader\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.430656 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a7633c77-af8c-4789-88d5-fbbb01f3e751-frr-conf\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.437066 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a7633c77-af8c-4789-88d5-fbbb01f3e751-metrics-certs\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.437402 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b17ddd86-5c6e-4898-a859-ce43f604fc10-cert\") pod \"frr-k8s-webhook-server-64bf5d555-4857d\" (UID: \"b17ddd86-5c6e-4898-a859-ce43f604fc10\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.468682 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnkh6\" (UniqueName: \"kubernetes.io/projected/b17ddd86-5c6e-4898-a859-ce43f604fc10-kube-api-access-gnkh6\") pod \"frr-k8s-webhook-server-64bf5d555-4857d\" (UID: \"b17ddd86-5c6e-4898-a859-ce43f604fc10\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.473506 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5prlf\" (UniqueName: \"kubernetes.io/projected/a7633c77-af8c-4789-88d5-fbbb01f3e751-kube-api-access-5prlf\") pod \"frr-k8s-jglgg\" (UID: \"a7633c77-af8c-4789-88d5-fbbb01f3e751\") " pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.516229 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.524720 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.530420 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgvrr\" (UniqueName: \"kubernetes.io/projected/99056ede-9949-4966-a265-fc3af4134013-kube-api-access-cgvrr\") pod \"controller-68d546b9d8-nj6r5\" (UID: \"99056ede-9949-4966-a265-fc3af4134013\") " pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.530455 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/88513bc0-a703-458f-a001-d6a636023c45-memberlist\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.530478 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/99056ede-9949-4966-a265-fc3af4134013-cert\") pod \"controller-68d546b9d8-nj6r5\" (UID: \"99056ede-9949-4966-a265-fc3af4134013\") " pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.530501 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbw74\" (UniqueName: \"kubernetes.io/projected/88513bc0-a703-458f-a001-d6a636023c45-kube-api-access-dbw74\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.530518 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99056ede-9949-4966-a265-fc3af4134013-metrics-certs\") pod \"controller-68d546b9d8-nj6r5\" (UID: \"99056ede-9949-4966-a265-fc3af4134013\") " pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.530554 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/88513bc0-a703-458f-a001-d6a636023c45-metallb-excludel2\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.530591 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88513bc0-a703-458f-a001-d6a636023c45-metrics-certs\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:15 crc kubenswrapper[4693]: E1008 07:29:15.530634 4693 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 08 07:29:15 crc kubenswrapper[4693]: E1008 07:29:15.530688 4693 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Oct 08 07:29:15 crc kubenswrapper[4693]: E1008 07:29:15.530706 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/88513bc0-a703-458f-a001-d6a636023c45-memberlist podName:88513bc0-a703-458f-a001-d6a636023c45 nodeName:}" failed. No retries permitted until 2025-10-08 07:29:16.030686134 +0000 UTC m=+741.401651069 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/88513bc0-a703-458f-a001-d6a636023c45-memberlist") pod "speaker-tjkwg" (UID: "88513bc0-a703-458f-a001-d6a636023c45") : secret "metallb-memberlist" not found Oct 08 07:29:15 crc kubenswrapper[4693]: E1008 07:29:15.530733 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/88513bc0-a703-458f-a001-d6a636023c45-metrics-certs podName:88513bc0-a703-458f-a001-d6a636023c45 nodeName:}" failed. No retries permitted until 2025-10-08 07:29:16.030717795 +0000 UTC m=+741.401682720 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/88513bc0-a703-458f-a001-d6a636023c45-metrics-certs") pod "speaker-tjkwg" (UID: "88513bc0-a703-458f-a001-d6a636023c45") : secret "speaker-certs-secret" not found Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.531433 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/88513bc0-a703-458f-a001-d6a636023c45-metallb-excludel2\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.533404 4693 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.534484 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99056ede-9949-4966-a265-fc3af4134013-metrics-certs\") pod \"controller-68d546b9d8-nj6r5\" (UID: \"99056ede-9949-4966-a265-fc3af4134013\") " pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.543451 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/99056ede-9949-4966-a265-fc3af4134013-cert\") pod \"controller-68d546b9d8-nj6r5\" (UID: \"99056ede-9949-4966-a265-fc3af4134013\") " pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.554252 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbw74\" (UniqueName: \"kubernetes.io/projected/88513bc0-a703-458f-a001-d6a636023c45-kube-api-access-dbw74\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.554772 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgvrr\" (UniqueName: \"kubernetes.io/projected/99056ede-9949-4966-a265-fc3af4134013-kube-api-access-cgvrr\") pod \"controller-68d546b9d8-nj6r5\" (UID: \"99056ede-9949-4966-a265-fc3af4134013\") " pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.600599 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.796233 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-nj6r5"] Oct 08 07:29:15 crc kubenswrapper[4693]: W1008 07:29:15.798667 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99056ede_9949_4966_a265_fc3af4134013.slice/crio-d8f2a73ea55f67a510fd4c6ea6236ad2381f243090f8f8403f70b7880118fa17 WatchSource:0}: Error finding container d8f2a73ea55f67a510fd4c6ea6236ad2381f243090f8f8403f70b7880118fa17: Status 404 returned error can't find the container with id d8f2a73ea55f67a510fd4c6ea6236ad2381f243090f8f8403f70b7880118fa17 Oct 08 07:29:15 crc kubenswrapper[4693]: I1008 07:29:15.928224 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-4857d"] Oct 08 07:29:15 crc kubenswrapper[4693]: W1008 07:29:15.936540 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb17ddd86_5c6e_4898_a859_ce43f604fc10.slice/crio-1aceffb98849adda63021bac3aa87cd41ddd727e54a9887560aa535677577eab WatchSource:0}: Error finding container 1aceffb98849adda63021bac3aa87cd41ddd727e54a9887560aa535677577eab: Status 404 returned error can't find the container with id 1aceffb98849adda63021bac3aa87cd41ddd727e54a9887560aa535677577eab Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.035577 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88513bc0-a703-458f-a001-d6a636023c45-metrics-certs\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.035638 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/88513bc0-a703-458f-a001-d6a636023c45-memberlist\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.041614 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/88513bc0-a703-458f-a001-d6a636023c45-memberlist\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.041735 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88513bc0-a703-458f-a001-d6a636023c45-metrics-certs\") pod \"speaker-tjkwg\" (UID: \"88513bc0-a703-458f-a001-d6a636023c45\") " pod="metallb-system/speaker-tjkwg" Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.193184 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-tjkwg" Oct 08 07:29:16 crc kubenswrapper[4693]: W1008 07:29:16.227455 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88513bc0_a703_458f_a001_d6a636023c45.slice/crio-c439197f61c6d47dc3918680372aa402ab402eab7971254e5d505b82951cbc0b WatchSource:0}: Error finding container c439197f61c6d47dc3918680372aa402ab402eab7971254e5d505b82951cbc0b: Status 404 returned error can't find the container with id c439197f61c6d47dc3918680372aa402ab402eab7971254e5d505b82951cbc0b Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.354190 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-tjkwg" event={"ID":"88513bc0-a703-458f-a001-d6a636023c45","Type":"ContainerStarted","Data":"c439197f61c6d47dc3918680372aa402ab402eab7971254e5d505b82951cbc0b"} Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.373209 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-nj6r5" event={"ID":"99056ede-9949-4966-a265-fc3af4134013","Type":"ContainerStarted","Data":"9bd92c2e8bfa93fe420e9cf98daafff04c1cb7bf92e1eec55a1077a7e996bb06"} Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.373283 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-nj6r5" event={"ID":"99056ede-9949-4966-a265-fc3af4134013","Type":"ContainerStarted","Data":"c22f34013af22f555614fa6772f5509eea732ae573f30424fb2c912e08d49462"} Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.373309 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-nj6r5" event={"ID":"99056ede-9949-4966-a265-fc3af4134013","Type":"ContainerStarted","Data":"d8f2a73ea55f67a510fd4c6ea6236ad2381f243090f8f8403f70b7880118fa17"} Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.373600 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.374944 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" event={"ID":"b17ddd86-5c6e-4898-a859-ce43f604fc10","Type":"ContainerStarted","Data":"1aceffb98849adda63021bac3aa87cd41ddd727e54a9887560aa535677577eab"} Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.378177 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jglgg" event={"ID":"a7633c77-af8c-4789-88d5-fbbb01f3e751","Type":"ContainerStarted","Data":"f12b17bf63da9e2f4486d474387a211bcc25a07c8110ae6c7d72d5e0462af2c3"} Oct 08 07:29:16 crc kubenswrapper[4693]: I1008 07:29:16.400990 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-nj6r5" podStartSLOduration=1.400970332 podStartE2EDuration="1.400970332s" podCreationTimestamp="2025-10-08 07:29:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:29:16.398719833 +0000 UTC m=+741.769684778" watchObservedRunningTime="2025-10-08 07:29:16.400970332 +0000 UTC m=+741.771935277" Oct 08 07:29:17 crc kubenswrapper[4693]: I1008 07:29:17.390943 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-tjkwg" event={"ID":"88513bc0-a703-458f-a001-d6a636023c45","Type":"ContainerStarted","Data":"fc4cf92b73ced1267ad7eead82578f3bc919cf7a41de0223f9b429cba1dedce9"} 
Oct 08 07:29:17 crc kubenswrapper[4693]: I1008 07:29:17.391343 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-tjkwg" event={"ID":"88513bc0-a703-458f-a001-d6a636023c45","Type":"ContainerStarted","Data":"d4a789cc0ddfab25124c1fc77c307d0b7689fcbda8cfabbe5a14e5a96bb74eef"}
Oct 08 07:29:18 crc kubenswrapper[4693]: I1008 07:29:18.404219 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-tjkwg"
Oct 08 07:29:21 crc kubenswrapper[4693]: I1008 07:29:21.842845 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-tjkwg" podStartSLOduration=6.84280589 podStartE2EDuration="6.84280589s" podCreationTimestamp="2025-10-08 07:29:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:29:17.421218369 +0000 UTC m=+742.792183344" watchObservedRunningTime="2025-10-08 07:29:21.84280589 +0000 UTC m=+747.213770845"
Oct 08 07:29:21 crc kubenswrapper[4693]: I1008 07:29:21.846327 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-pcr5x"]
Oct 08 07:29:21 crc kubenswrapper[4693]: I1008 07:29:21.846526 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" podUID="927ac9fe-c982-487c-8258-e137f2ba8cdb" containerName="controller-manager" containerID="cri-o://1e14bca08628a73687d55ffca86c228c41e8dbad53c9884f9e69eaf887607e04" gracePeriod=30
Oct 08 07:29:21 crc kubenswrapper[4693]: I1008 07:29:21.937012 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"]
Oct 08 07:29:21 crc kubenswrapper[4693]: I1008 07:29:21.937274 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" podUID="f13d519d-a936-4e80-b8a3-f1946cb85ac3" containerName="route-controller-manager" containerID="cri-o://e4832cc2a81e3d152e57b123ee039d8adf7a9972d8e042cf4f83d7606733ac18" gracePeriod=30
Oct 08 07:29:22 crc kubenswrapper[4693]: I1008 07:29:22.455467 4693 generic.go:334] "Generic (PLEG): container finished" podID="f13d519d-a936-4e80-b8a3-f1946cb85ac3" containerID="e4832cc2a81e3d152e57b123ee039d8adf7a9972d8e042cf4f83d7606733ac18" exitCode=0
Oct 08 07:29:22 crc kubenswrapper[4693]: I1008 07:29:22.455540 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" event={"ID":"f13d519d-a936-4e80-b8a3-f1946cb85ac3","Type":"ContainerDied","Data":"e4832cc2a81e3d152e57b123ee039d8adf7a9972d8e042cf4f83d7606733ac18"}
Oct 08 07:29:22 crc kubenswrapper[4693]: I1008 07:29:22.456977 4693 generic.go:334] "Generic (PLEG): container finished" podID="927ac9fe-c982-487c-8258-e137f2ba8cdb" containerID="1e14bca08628a73687d55ffca86c228c41e8dbad53c9884f9e69eaf887607e04" exitCode=0
Oct 08 07:29:22 crc kubenswrapper[4693]: I1008 07:29:22.456999 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" event={"ID":"927ac9fe-c982-487c-8258-e137f2ba8cdb","Type":"ContainerDied","Data":"1e14bca08628a73687d55ffca86c228c41e8dbad53c9884f9e69eaf887607e04"}
Oct 08 07:29:23 crc kubenswrapper[4693]: I1008 07:29:23.490142 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:29:23 crc kubenswrapper[4693]: I1008 07:29:23.490448 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:29:23 crc kubenswrapper[4693]: I1008 07:29:23.490485 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr"
Oct 08 07:29:23 crc kubenswrapper[4693]: I1008 07:29:23.491010 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9384b82d3cba6b720e3fd8f3218eba5a267966bc2aa3a68986f9c6356d620303"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 08 07:29:23 crc kubenswrapper[4693]: I1008 07:29:23.491075 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://9384b82d3cba6b720e3fd8f3218eba5a267966bc2aa3a68986f9c6356d620303" gracePeriod=600
Oct 08 07:29:23 crc kubenswrapper[4693]: I1008 07:29:23.904874 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x"
Oct 08 07:29:23 crc kubenswrapper[4693]: I1008 07:29:23.931508 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-58887b5b9c-wlpks"]
Oct 08 07:29:23 crc kubenswrapper[4693]: E1008 07:29:23.931720 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927ac9fe-c982-487c-8258-e137f2ba8cdb" containerName="controller-manager"
Oct 08 07:29:23 crc kubenswrapper[4693]: I1008 07:29:23.931731 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="927ac9fe-c982-487c-8258-e137f2ba8cdb" containerName="controller-manager"
Oct 08 07:29:23 crc kubenswrapper[4693]: I1008 07:29:23.931839 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="927ac9fe-c982-487c-8258-e137f2ba8cdb" containerName="controller-manager"
Oct 08 07:29:23 crc kubenswrapper[4693]: I1008 07:29:23.932175 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks"
Oct 08 07:29:23 crc kubenswrapper[4693]: I1008 07:29:23.949383 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58887b5b9c-wlpks"]
Oct 08 07:29:23 crc kubenswrapper[4693]: I1008 07:29:23.959803 4693 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.052464 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-proxy-ca-bundles\") pod \"927ac9fe-c982-487c-8258-e137f2ba8cdb\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.052510 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-client-ca\") pod \"927ac9fe-c982-487c-8258-e137f2ba8cdb\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.052619 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-config\") pod \"927ac9fe-c982-487c-8258-e137f2ba8cdb\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.052672 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/927ac9fe-c982-487c-8258-e137f2ba8cdb-serving-cert\") pod \"927ac9fe-c982-487c-8258-e137f2ba8cdb\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.053156 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dwfq\" (UniqueName: \"kubernetes.io/projected/927ac9fe-c982-487c-8258-e137f2ba8cdb-kube-api-access-9dwfq\") pod \"927ac9fe-c982-487c-8258-e137f2ba8cdb\" (UID: \"927ac9fe-c982-487c-8258-e137f2ba8cdb\") " Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.053277 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/65b77fe2-da13-4fcb-8672-3286c71f788e-proxy-ca-bundles\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.053279 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "927ac9fe-c982-487c-8258-e137f2ba8cdb" (UID: "927ac9fe-c982-487c-8258-e137f2ba8cdb"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.053293 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-client-ca" (OuterVolumeSpecName: "client-ca") pod "927ac9fe-c982-487c-8258-e137f2ba8cdb" (UID: "927ac9fe-c982-487c-8258-e137f2ba8cdb"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.053330 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65b77fe2-da13-4fcb-8672-3286c71f788e-client-ca\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.053448 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfhhl\" (UniqueName: \"kubernetes.io/projected/65b77fe2-da13-4fcb-8672-3286c71f788e-kube-api-access-qfhhl\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.053540 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65b77fe2-da13-4fcb-8672-3286c71f788e-config\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.053589 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65b77fe2-da13-4fcb-8672-3286c71f788e-serving-cert\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.053692 4693 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.053706 4693 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-client-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.054063 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-config" (OuterVolumeSpecName: "config") pod "927ac9fe-c982-487c-8258-e137f2ba8cdb" (UID: "927ac9fe-c982-487c-8258-e137f2ba8cdb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.058344 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/927ac9fe-c982-487c-8258-e137f2ba8cdb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "927ac9fe-c982-487c-8258-e137f2ba8cdb" (UID: "927ac9fe-c982-487c-8258-e137f2ba8cdb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.058358 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/927ac9fe-c982-487c-8258-e137f2ba8cdb-kube-api-access-9dwfq" (OuterVolumeSpecName: "kube-api-access-9dwfq") pod "927ac9fe-c982-487c-8258-e137f2ba8cdb" (UID: "927ac9fe-c982-487c-8258-e137f2ba8cdb"). 
InnerVolumeSpecName "kube-api-access-9dwfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.154274 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhc78\" (UniqueName: \"kubernetes.io/projected/f13d519d-a936-4e80-b8a3-f1946cb85ac3-kube-api-access-xhc78\") pod \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.154484 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f13d519d-a936-4e80-b8a3-f1946cb85ac3-client-ca\") pod \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.155144 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f13d519d-a936-4e80-b8a3-f1946cb85ac3-serving-cert\") pod \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.155235 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f13d519d-a936-4e80-b8a3-f1946cb85ac3-client-ca" (OuterVolumeSpecName: "client-ca") pod "f13d519d-a936-4e80-b8a3-f1946cb85ac3" (UID: "f13d519d-a936-4e80-b8a3-f1946cb85ac3"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.155361 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f13d519d-a936-4e80-b8a3-f1946cb85ac3-config\") pod \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\" (UID: \"f13d519d-a936-4e80-b8a3-f1946cb85ac3\") " Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.155859 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfhhl\" (UniqueName: \"kubernetes.io/projected/65b77fe2-da13-4fcb-8672-3286c71f788e-kube-api-access-qfhhl\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.155989 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f13d519d-a936-4e80-b8a3-f1946cb85ac3-config" (OuterVolumeSpecName: "config") pod "f13d519d-a936-4e80-b8a3-f1946cb85ac3" (UID: "f13d519d-a936-4e80-b8a3-f1946cb85ac3"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.156173 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65b77fe2-da13-4fcb-8672-3286c71f788e-config\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.156327 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65b77fe2-da13-4fcb-8672-3286c71f788e-serving-cert\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.156550 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/65b77fe2-da13-4fcb-8672-3286c71f788e-proxy-ca-bundles\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.156711 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65b77fe2-da13-4fcb-8672-3286c71f788e-client-ca\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.156986 4693 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f13d519d-a936-4e80-b8a3-f1946cb85ac3-client-ca\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.157096 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/927ac9fe-c982-487c-8258-e137f2ba8cdb-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.157202 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f13d519d-a936-4e80-b8a3-f1946cb85ac3-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.157288 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/927ac9fe-c982-487c-8258-e137f2ba8cdb-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.157382 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dwfq\" (UniqueName: \"kubernetes.io/projected/927ac9fe-c982-487c-8258-e137f2ba8cdb-kube-api-access-9dwfq\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.157890 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65b77fe2-da13-4fcb-8672-3286c71f788e-config\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.158109 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"client-ca\" (UniqueName: \"kubernetes.io/configmap/65b77fe2-da13-4fcb-8672-3286c71f788e-client-ca\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.158218 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/65b77fe2-da13-4fcb-8672-3286c71f788e-proxy-ca-bundles\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.161331 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f13d519d-a936-4e80-b8a3-f1946cb85ac3-kube-api-access-xhc78" (OuterVolumeSpecName: "kube-api-access-xhc78") pod "f13d519d-a936-4e80-b8a3-f1946cb85ac3" (UID: "f13d519d-a936-4e80-b8a3-f1946cb85ac3"). InnerVolumeSpecName "kube-api-access-xhc78". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.161588 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f13d519d-a936-4e80-b8a3-f1946cb85ac3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f13d519d-a936-4e80-b8a3-f1946cb85ac3" (UID: "f13d519d-a936-4e80-b8a3-f1946cb85ac3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.162020 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65b77fe2-da13-4fcb-8672-3286c71f788e-serving-cert\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.181501 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfhhl\" (UniqueName: \"kubernetes.io/projected/65b77fe2-da13-4fcb-8672-3286c71f788e-kube-api-access-qfhhl\") pod \"controller-manager-58887b5b9c-wlpks\" (UID: \"65b77fe2-da13-4fcb-8672-3286c71f788e\") " pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.259093 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhc78\" (UniqueName: \"kubernetes.io/projected/f13d519d-a936-4e80-b8a3-f1946cb85ac3-kube-api-access-xhc78\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.259141 4693 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f13d519d-a936-4e80-b8a3-f1946cb85ac3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.272717 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.378856 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49"] Oct 08 07:29:24 crc kubenswrapper[4693]: E1008 07:29:24.379433 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f13d519d-a936-4e80-b8a3-f1946cb85ac3" containerName="route-controller-manager" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.379450 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f13d519d-a936-4e80-b8a3-f1946cb85ac3" containerName="route-controller-manager" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.379595 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="f13d519d-a936-4e80-b8a3-f1946cb85ac3" containerName="route-controller-manager" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.380153 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.385641 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49"] Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.472721 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="9384b82d3cba6b720e3fd8f3218eba5a267966bc2aa3a68986f9c6356d620303" exitCode=0 Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.472780 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"9384b82d3cba6b720e3fd8f3218eba5a267966bc2aa3a68986f9c6356d620303"} Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.472839 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"2d5b335782865c10d71fa446e1c1690f0fdc6f76d8d4163cb446a08bd0b03853"} Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.472904 4693 scope.go:117] "RemoveContainer" containerID="1f192715e7fc72c7f7c38f3d78b7db0c4e4be64fdd4d7049d590c1c31d9d85d1" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.477447 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" event={"ID":"927ac9fe-c982-487c-8258-e137f2ba8cdb","Type":"ContainerDied","Data":"cd04abe9946779742eed56c0692288b799eba6d4c9cea8a9e81b62e7e2f16f4f"} Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.477460 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-pcr5x" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.478569 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" event={"ID":"b17ddd86-5c6e-4898-a859-ce43f604fc10","Type":"ContainerStarted","Data":"db56fee59ebdd6b75aefc0bf2b856b57e8358dfdb5c3d47925d0f3dc819f533c"} Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.479077 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.480437 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" event={"ID":"f13d519d-a936-4e80-b8a3-f1946cb85ac3","Type":"ContainerDied","Data":"90819c8ed98934fd3ee65a023f5cb47f1de1183f15345a5c50e5abfca26376d2"} Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.480502 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.484059 4693 generic.go:334] "Generic (PLEG): container finished" podID="a7633c77-af8c-4789-88d5-fbbb01f3e751" containerID="4ac753e2890450c187939a9c9c71deeb11cabf79be77e1176d7d80e7b7169094" exitCode=0 Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.484095 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jglgg" event={"ID":"a7633c77-af8c-4789-88d5-fbbb01f3e751","Type":"ContainerDied","Data":"4ac753e2890450c187939a9c9c71deeb11cabf79be77e1176d7d80e7b7169094"} Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.523640 4693 scope.go:117] "RemoveContainer" containerID="1e14bca08628a73687d55ffca86c228c41e8dbad53c9884f9e69eaf887607e04" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.538979 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" podStartSLOduration=1.7628982359999998 podStartE2EDuration="9.538960971s" podCreationTimestamp="2025-10-08 07:29:15 +0000 UTC" firstStartedPulling="2025-10-08 07:29:15.938632785 +0000 UTC m=+741.309597720" lastFinishedPulling="2025-10-08 07:29:23.71469552 +0000 UTC m=+749.085660455" observedRunningTime="2025-10-08 07:29:24.537896893 +0000 UTC m=+749.908861848" watchObservedRunningTime="2025-10-08 07:29:24.538960971 +0000 UTC m=+749.909925926" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.553417 4693 scope.go:117] "RemoveContainer" containerID="e4832cc2a81e3d152e57b123ee039d8adf7a9972d8e042cf4f83d7606733ac18" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.561915 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d465c0-f1f0-490d-8e16-27e792f4f1da-config\") pod \"route-controller-manager-7cbbfd8fd7-smv49\" (UID: \"b6d465c0-f1f0-490d-8e16-27e792f4f1da\") " pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.561949 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b6d465c0-f1f0-490d-8e16-27e792f4f1da-client-ca\") pod \"route-controller-manager-7cbbfd8fd7-smv49\" (UID: 
\"b6d465c0-f1f0-490d-8e16-27e792f4f1da\") " pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.562039 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6d465c0-f1f0-490d-8e16-27e792f4f1da-serving-cert\") pod \"route-controller-manager-7cbbfd8fd7-smv49\" (UID: \"b6d465c0-f1f0-490d-8e16-27e792f4f1da\") " pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.562062 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjkdp\" (UniqueName: \"kubernetes.io/projected/b6d465c0-f1f0-490d-8e16-27e792f4f1da-kube-api-access-jjkdp\") pod \"route-controller-manager-7cbbfd8fd7-smv49\" (UID: \"b6d465c0-f1f0-490d-8e16-27e792f4f1da\") " pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.566319 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-pcr5x"] Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.584174 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-pcr5x"] Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.584241 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"] Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.592493 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-ndznc"] Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.663473 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6d465c0-f1f0-490d-8e16-27e792f4f1da-serving-cert\") pod \"route-controller-manager-7cbbfd8fd7-smv49\" (UID: \"b6d465c0-f1f0-490d-8e16-27e792f4f1da\") " pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.664009 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjkdp\" (UniqueName: \"kubernetes.io/projected/b6d465c0-f1f0-490d-8e16-27e792f4f1da-kube-api-access-jjkdp\") pod \"route-controller-manager-7cbbfd8fd7-smv49\" (UID: \"b6d465c0-f1f0-490d-8e16-27e792f4f1da\") " pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.664104 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d465c0-f1f0-490d-8e16-27e792f4f1da-config\") pod \"route-controller-manager-7cbbfd8fd7-smv49\" (UID: \"b6d465c0-f1f0-490d-8e16-27e792f4f1da\") " pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.664145 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b6d465c0-f1f0-490d-8e16-27e792f4f1da-client-ca\") pod \"route-controller-manager-7cbbfd8fd7-smv49\" (UID: \"b6d465c0-f1f0-490d-8e16-27e792f4f1da\") " 
pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.665271 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d465c0-f1f0-490d-8e16-27e792f4f1da-config\") pod \"route-controller-manager-7cbbfd8fd7-smv49\" (UID: \"b6d465c0-f1f0-490d-8e16-27e792f4f1da\") " pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.665540 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b6d465c0-f1f0-490d-8e16-27e792f4f1da-client-ca\") pod \"route-controller-manager-7cbbfd8fd7-smv49\" (UID: \"b6d465c0-f1f0-490d-8e16-27e792f4f1da\") " pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.675403 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6d465c0-f1f0-490d-8e16-27e792f4f1da-serving-cert\") pod \"route-controller-manager-7cbbfd8fd7-smv49\" (UID: \"b6d465c0-f1f0-490d-8e16-27e792f4f1da\") " pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.682847 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjkdp\" (UniqueName: \"kubernetes.io/projected/b6d465c0-f1f0-490d-8e16-27e792f4f1da-kube-api-access-jjkdp\") pod \"route-controller-manager-7cbbfd8fd7-smv49\" (UID: \"b6d465c0-f1f0-490d-8e16-27e792f4f1da\") " pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.708082 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.725053 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58887b5b9c-wlpks"] Oct 08 07:29:24 crc kubenswrapper[4693]: W1008 07:29:24.733280 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65b77fe2_da13_4fcb_8672_3286c71f788e.slice/crio-b9ced7d93a91565d0f77af0690920891714991b0ac17287ead3226550f61280d WatchSource:0}: Error finding container b9ced7d93a91565d0f77af0690920891714991b0ac17287ead3226550f61280d: Status 404 returned error can't find the container with id b9ced7d93a91565d0f77af0690920891714991b0ac17287ead3226550f61280d Oct 08 07:29:24 crc kubenswrapper[4693]: I1008 07:29:24.940381 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49"] Oct 08 07:29:24 crc kubenswrapper[4693]: W1008 07:29:24.944985 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6d465c0_f1f0_490d_8e16_27e792f4f1da.slice/crio-9e65d43320db9a7bc8fa1c0e5d3e2ca34047d790c950d521cadfdec1809bf51c WatchSource:0}: Error finding container 9e65d43320db9a7bc8fa1c0e5d3e2ca34047d790c950d521cadfdec1809bf51c: Status 404 returned error can't find the container with id 9e65d43320db9a7bc8fa1c0e5d3e2ca34047d790c950d521cadfdec1809bf51c Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.369795 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="927ac9fe-c982-487c-8258-e137f2ba8cdb" path="/var/lib/kubelet/pods/927ac9fe-c982-487c-8258-e137f2ba8cdb/volumes" Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.370698 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f13d519d-a936-4e80-b8a3-f1946cb85ac3" path="/var/lib/kubelet/pods/f13d519d-a936-4e80-b8a3-f1946cb85ac3/volumes" Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.501638 4693 generic.go:334] "Generic (PLEG): container finished" podID="a7633c77-af8c-4789-88d5-fbbb01f3e751" containerID="388814abfdb3c8f523ea30bcd124674a840ce5a8bfa308b84a8a01b78f92ec4d" exitCode=0 Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.501728 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jglgg" event={"ID":"a7633c77-af8c-4789-88d5-fbbb01f3e751","Type":"ContainerDied","Data":"388814abfdb3c8f523ea30bcd124674a840ce5a8bfa308b84a8a01b78f92ec4d"} Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.503207 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" event={"ID":"65b77fe2-da13-4fcb-8672-3286c71f788e","Type":"ContainerStarted","Data":"df6b3334683557532bb1ff363d00a26bc4880e642236315e2361a64458b0ea36"} Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.503236 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" event={"ID":"65b77fe2-da13-4fcb-8672-3286c71f788e","Type":"ContainerStarted","Data":"b9ced7d93a91565d0f77af0690920891714991b0ac17287ead3226550f61280d"} Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.503434 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:25 crc 
kubenswrapper[4693]: I1008 07:29:25.504504 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" event={"ID":"b6d465c0-f1f0-490d-8e16-27e792f4f1da","Type":"ContainerStarted","Data":"d5e8879c4edcca1bf6316237f798136b3a4213d88f703c59515962835fa9d604"} Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.504538 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" event={"ID":"b6d465c0-f1f0-490d-8e16-27e792f4f1da","Type":"ContainerStarted","Data":"9e65d43320db9a7bc8fa1c0e5d3e2ca34047d790c950d521cadfdec1809bf51c"} Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.505063 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.513444 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.513663 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.558733 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-58887b5b9c-wlpks" podStartSLOduration=4.558714636 podStartE2EDuration="4.558714636s" podCreationTimestamp="2025-10-08 07:29:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:29:25.556623582 +0000 UTC m=+750.927588537" watchObservedRunningTime="2025-10-08 07:29:25.558714636 +0000 UTC m=+750.929679571" Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.583184 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7cbbfd8fd7-smv49" podStartSLOduration=2.5831675020000002 podStartE2EDuration="2.583167502s" podCreationTimestamp="2025-10-08 07:29:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:29:25.581989592 +0000 UTC m=+750.952954527" watchObservedRunningTime="2025-10-08 07:29:25.583167502 +0000 UTC m=+750.954132437" Oct 08 07:29:25 crc kubenswrapper[4693]: I1008 07:29:25.603544 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-nj6r5" Oct 08 07:29:26 crc kubenswrapper[4693]: I1008 07:29:26.197989 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-tjkwg" Oct 08 07:29:26 crc kubenswrapper[4693]: I1008 07:29:26.520893 4693 generic.go:334] "Generic (PLEG): container finished" podID="a7633c77-af8c-4789-88d5-fbbb01f3e751" containerID="252b30965ec9d08e1de6240333d827ae1aa5cd8739ec80c0dda79ac524e44073" exitCode=0 Oct 08 07:29:26 crc kubenswrapper[4693]: I1008 07:29:26.521118 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jglgg" event={"ID":"a7633c77-af8c-4789-88d5-fbbb01f3e751","Type":"ContainerDied","Data":"252b30965ec9d08e1de6240333d827ae1aa5cd8739ec80c0dda79ac524e44073"} Oct 08 07:29:27 crc kubenswrapper[4693]: I1008 07:29:27.540346 4693 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="metallb-system/frr-k8s-jglgg" event={"ID":"a7633c77-af8c-4789-88d5-fbbb01f3e751","Type":"ContainerStarted","Data":"ea7ebc066f8d2a05026e24decf99a84d838e50e392ecff7c3d7ac88bfb539595"} Oct 08 07:29:27 crc kubenswrapper[4693]: I1008 07:29:27.540860 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jglgg" event={"ID":"a7633c77-af8c-4789-88d5-fbbb01f3e751","Type":"ContainerStarted","Data":"a33a87ab4451be82ebbda44e776e7e4312d55c64c1c8e9779776ceec7322d212"} Oct 08 07:29:27 crc kubenswrapper[4693]: I1008 07:29:27.540902 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jglgg" event={"ID":"a7633c77-af8c-4789-88d5-fbbb01f3e751","Type":"ContainerStarted","Data":"41b4489d745afa416e79b412b3cc7122a90466743373265e04632cbd91f8c77f"} Oct 08 07:29:27 crc kubenswrapper[4693]: I1008 07:29:27.540932 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jglgg" event={"ID":"a7633c77-af8c-4789-88d5-fbbb01f3e751","Type":"ContainerStarted","Data":"f03c599dd973529cec6f3848c647bb2e6fc866d528a07cf0651dd403e1227ade"} Oct 08 07:29:27 crc kubenswrapper[4693]: I1008 07:29:27.540950 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jglgg" event={"ID":"a7633c77-af8c-4789-88d5-fbbb01f3e751","Type":"ContainerStarted","Data":"420a534825e87a114c7e37033288ca5a2adc8dbf503131fc9a45be23dbce97f1"} Oct 08 07:29:28 crc kubenswrapper[4693]: I1008 07:29:28.556802 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jglgg" event={"ID":"a7633c77-af8c-4789-88d5-fbbb01f3e751","Type":"ContainerStarted","Data":"6aa3b83185094ac8bff7de9582963970ca3d8029176be70966d19ea9b3bccc3b"} Oct 08 07:29:28 crc kubenswrapper[4693]: I1008 07:29:28.557187 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:28 crc kubenswrapper[4693]: I1008 07:29:28.591808 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-jglgg" podStartSLOduration=5.486505123 podStartE2EDuration="13.591786001s" podCreationTimestamp="2025-10-08 07:29:15 +0000 UTC" firstStartedPulling="2025-10-08 07:29:15.646782164 +0000 UTC m=+741.017747099" lastFinishedPulling="2025-10-08 07:29:23.752063032 +0000 UTC m=+749.123027977" observedRunningTime="2025-10-08 07:29:28.590588959 +0000 UTC m=+753.961553904" watchObservedRunningTime="2025-10-08 07:29:28.591786001 +0000 UTC m=+753.962750976" Oct 08 07:29:28 crc kubenswrapper[4693]: I1008 07:29:28.797955 4693 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 08 07:29:29 crc kubenswrapper[4693]: I1008 07:29:29.288082 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-jj5jr"] Oct 08 07:29:29 crc kubenswrapper[4693]: I1008 07:29:29.289137 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jj5jr" Oct 08 07:29:29 crc kubenswrapper[4693]: I1008 07:29:29.291387 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 08 07:29:29 crc kubenswrapper[4693]: I1008 07:29:29.292470 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 08 07:29:29 crc kubenswrapper[4693]: I1008 07:29:29.342794 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jj5jr"] Oct 08 07:29:29 crc kubenswrapper[4693]: I1008 07:29:29.436102 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh8fh\" (UniqueName: \"kubernetes.io/projected/7a88cf58-5a0f-4285-b988-3f3bb99181f2-kube-api-access-sh8fh\") pod \"openstack-operator-index-jj5jr\" (UID: \"7a88cf58-5a0f-4285-b988-3f3bb99181f2\") " pod="openstack-operators/openstack-operator-index-jj5jr" Oct 08 07:29:29 crc kubenswrapper[4693]: I1008 07:29:29.537173 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh8fh\" (UniqueName: \"kubernetes.io/projected/7a88cf58-5a0f-4285-b988-3f3bb99181f2-kube-api-access-sh8fh\") pod \"openstack-operator-index-jj5jr\" (UID: \"7a88cf58-5a0f-4285-b988-3f3bb99181f2\") " pod="openstack-operators/openstack-operator-index-jj5jr" Oct 08 07:29:29 crc kubenswrapper[4693]: I1008 07:29:29.563330 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh8fh\" (UniqueName: \"kubernetes.io/projected/7a88cf58-5a0f-4285-b988-3f3bb99181f2-kube-api-access-sh8fh\") pod \"openstack-operator-index-jj5jr\" (UID: \"7a88cf58-5a0f-4285-b988-3f3bb99181f2\") " pod="openstack-operators/openstack-operator-index-jj5jr" Oct 08 07:29:29 crc kubenswrapper[4693]: I1008 07:29:29.612359 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jj5jr" Oct 08 07:29:30 crc kubenswrapper[4693]: I1008 07:29:30.059652 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jj5jr"] Oct 08 07:29:30 crc kubenswrapper[4693]: W1008 07:29:30.060364 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a88cf58_5a0f_4285_b988_3f3bb99181f2.slice/crio-1b5afff89579dfca028ce310068d37e647b109a9a4e19a2c048bb5c86e608f51 WatchSource:0}: Error finding container 1b5afff89579dfca028ce310068d37e647b109a9a4e19a2c048bb5c86e608f51: Status 404 returned error can't find the container with id 1b5afff89579dfca028ce310068d37e647b109a9a4e19a2c048bb5c86e608f51 Oct 08 07:29:30 crc kubenswrapper[4693]: I1008 07:29:30.516608 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:30 crc kubenswrapper[4693]: I1008 07:29:30.557552 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:30 crc kubenswrapper[4693]: I1008 07:29:30.574286 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jj5jr" event={"ID":"7a88cf58-5a0f-4285-b988-3f3bb99181f2","Type":"ContainerStarted","Data":"1b5afff89579dfca028ce310068d37e647b109a9a4e19a2c048bb5c86e608f51"} Oct 08 07:29:32 crc kubenswrapper[4693]: I1008 07:29:32.056247 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jj5jr"] Oct 08 07:29:32 crc kubenswrapper[4693]: I1008 07:29:32.669937 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-qbdg8"] Oct 08 07:29:32 crc kubenswrapper[4693]: I1008 07:29:32.671668 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qbdg8" Oct 08 07:29:32 crc kubenswrapper[4693]: I1008 07:29:32.675183 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-5zmvg" Oct 08 07:29:32 crc kubenswrapper[4693]: I1008 07:29:32.686592 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qbdg8"] Oct 08 07:29:32 crc kubenswrapper[4693]: I1008 07:29:32.783233 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79d4p\" (UniqueName: \"kubernetes.io/projected/875357f9-bf15-47db-83a9-12868aca6f98-kube-api-access-79d4p\") pod \"openstack-operator-index-qbdg8\" (UID: \"875357f9-bf15-47db-83a9-12868aca6f98\") " pod="openstack-operators/openstack-operator-index-qbdg8" Oct 08 07:29:32 crc kubenswrapper[4693]: I1008 07:29:32.884429 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79d4p\" (UniqueName: \"kubernetes.io/projected/875357f9-bf15-47db-83a9-12868aca6f98-kube-api-access-79d4p\") pod \"openstack-operator-index-qbdg8\" (UID: \"875357f9-bf15-47db-83a9-12868aca6f98\") " pod="openstack-operators/openstack-operator-index-qbdg8" Oct 08 07:29:32 crc kubenswrapper[4693]: I1008 07:29:32.905041 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79d4p\" (UniqueName: \"kubernetes.io/projected/875357f9-bf15-47db-83a9-12868aca6f98-kube-api-access-79d4p\") pod \"openstack-operator-index-qbdg8\" (UID: \"875357f9-bf15-47db-83a9-12868aca6f98\") " pod="openstack-operators/openstack-operator-index-qbdg8" Oct 08 07:29:33 crc kubenswrapper[4693]: I1008 07:29:33.003853 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qbdg8" Oct 08 07:29:34 crc kubenswrapper[4693]: I1008 07:29:34.246083 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qbdg8"] Oct 08 07:29:34 crc kubenswrapper[4693]: W1008 07:29:34.254876 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod875357f9_bf15_47db_83a9_12868aca6f98.slice/crio-50524872cc721fdb50e4a484ecba86d2ba739f7f740731097bdf014d7c7ef5cf WatchSource:0}: Error finding container 50524872cc721fdb50e4a484ecba86d2ba739f7f740731097bdf014d7c7ef5cf: Status 404 returned error can't find the container with id 50524872cc721fdb50e4a484ecba86d2ba739f7f740731097bdf014d7c7ef5cf Oct 08 07:29:34 crc kubenswrapper[4693]: I1008 07:29:34.610589 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qbdg8" event={"ID":"875357f9-bf15-47db-83a9-12868aca6f98","Type":"ContainerStarted","Data":"9c838d8aa4feb71a58bae390f380424ca66c0bbc202f286a44698da0ba5a362f"} Oct 08 07:29:34 crc kubenswrapper[4693]: I1008 07:29:34.610928 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qbdg8" event={"ID":"875357f9-bf15-47db-83a9-12868aca6f98","Type":"ContainerStarted","Data":"50524872cc721fdb50e4a484ecba86d2ba739f7f740731097bdf014d7c7ef5cf"} Oct 08 07:29:34 crc kubenswrapper[4693]: I1008 07:29:34.613537 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jj5jr" event={"ID":"7a88cf58-5a0f-4285-b988-3f3bb99181f2","Type":"ContainerStarted","Data":"ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea"} Oct 08 07:29:34 crc kubenswrapper[4693]: I1008 07:29:34.613721 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-jj5jr" podUID="7a88cf58-5a0f-4285-b988-3f3bb99181f2" containerName="registry-server" containerID="cri-o://ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea" gracePeriod=2 Oct 08 07:29:34 crc kubenswrapper[4693]: I1008 07:29:34.632545 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-qbdg8" podStartSLOduration=2.579733095 podStartE2EDuration="2.632515938s" podCreationTimestamp="2025-10-08 07:29:32 +0000 UTC" firstStartedPulling="2025-10-08 07:29:34.261845887 +0000 UTC m=+759.632810872" lastFinishedPulling="2025-10-08 07:29:34.31462874 +0000 UTC m=+759.685593715" observedRunningTime="2025-10-08 07:29:34.629107309 +0000 UTC m=+760.000072284" watchObservedRunningTime="2025-10-08 07:29:34.632515938 +0000 UTC m=+760.003480903" Oct 08 07:29:34 crc kubenswrapper[4693]: I1008 07:29:34.649659 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-jj5jr" podStartSLOduration=1.876846947 podStartE2EDuration="5.649634983s" podCreationTimestamp="2025-10-08 07:29:29 +0000 UTC" firstStartedPulling="2025-10-08 07:29:30.062580108 +0000 UTC m=+755.433545043" lastFinishedPulling="2025-10-08 07:29:33.835368134 +0000 UTC m=+759.206333079" observedRunningTime="2025-10-08 07:29:34.645534866 +0000 UTC m=+760.016499851" watchObservedRunningTime="2025-10-08 07:29:34.649634983 +0000 UTC m=+760.020599948" Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.142093 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jj5jr" Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.222760 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sh8fh\" (UniqueName: \"kubernetes.io/projected/7a88cf58-5a0f-4285-b988-3f3bb99181f2-kube-api-access-sh8fh\") pod \"7a88cf58-5a0f-4285-b988-3f3bb99181f2\" (UID: \"7a88cf58-5a0f-4285-b988-3f3bb99181f2\") " Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.230567 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a88cf58-5a0f-4285-b988-3f3bb99181f2-kube-api-access-sh8fh" (OuterVolumeSpecName: "kube-api-access-sh8fh") pod "7a88cf58-5a0f-4285-b988-3f3bb99181f2" (UID: "7a88cf58-5a0f-4285-b988-3f3bb99181f2"). InnerVolumeSpecName "kube-api-access-sh8fh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.324195 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sh8fh\" (UniqueName: \"kubernetes.io/projected/7a88cf58-5a0f-4285-b988-3f3bb99181f2-kube-api-access-sh8fh\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.531850 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-4857d" Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.623048 4693 generic.go:334] "Generic (PLEG): container finished" podID="7a88cf58-5a0f-4285-b988-3f3bb99181f2" containerID="ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea" exitCode=0 Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.623106 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jj5jr" Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.623108 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jj5jr" event={"ID":"7a88cf58-5a0f-4285-b988-3f3bb99181f2","Type":"ContainerDied","Data":"ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea"} Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.623249 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jj5jr" event={"ID":"7a88cf58-5a0f-4285-b988-3f3bb99181f2","Type":"ContainerDied","Data":"1b5afff89579dfca028ce310068d37e647b109a9a4e19a2c048bb5c86e608f51"} Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.623285 4693 scope.go:117] "RemoveContainer" containerID="ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea" Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.654753 4693 scope.go:117] "RemoveContainer" containerID="ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea" Oct 08 07:29:35 crc kubenswrapper[4693]: E1008 07:29:35.657268 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea\": container with ID starting with ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea not found: ID does not exist" containerID="ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea" Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.657317 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea"} 
err="failed to get container status \"ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea\": rpc error: code = NotFound desc = could not find container \"ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea\": container with ID starting with ca3d85288782189a62d48efddc4a067ecbe0fb11c274a4ee694a526e89e53bea not found: ID does not exist" Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.660340 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jj5jr"] Oct 08 07:29:35 crc kubenswrapper[4693]: I1008 07:29:35.666618 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-jj5jr"] Oct 08 07:29:37 crc kubenswrapper[4693]: I1008 07:29:37.382397 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a88cf58-5a0f-4285-b988-3f3bb99181f2" path="/var/lib/kubelet/pods/7a88cf58-5a0f-4285-b988-3f3bb99181f2/volumes" Oct 08 07:29:43 crc kubenswrapper[4693]: I1008 07:29:43.005094 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-qbdg8" Oct 08 07:29:43 crc kubenswrapper[4693]: I1008 07:29:43.005529 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-qbdg8" Oct 08 07:29:43 crc kubenswrapper[4693]: I1008 07:29:43.043612 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-qbdg8" Oct 08 07:29:43 crc kubenswrapper[4693]: I1008 07:29:43.741448 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-qbdg8" Oct 08 07:29:45 crc kubenswrapper[4693]: I1008 07:29:45.520353 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-jglgg" Oct 08 07:29:50 crc kubenswrapper[4693]: I1008 07:29:50.945148 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn"] Oct 08 07:29:50 crc kubenswrapper[4693]: E1008 07:29:50.945707 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a88cf58-5a0f-4285-b988-3f3bb99181f2" containerName="registry-server" Oct 08 07:29:50 crc kubenswrapper[4693]: I1008 07:29:50.945722 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a88cf58-5a0f-4285-b988-3f3bb99181f2" containerName="registry-server" Oct 08 07:29:50 crc kubenswrapper[4693]: I1008 07:29:50.945904 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a88cf58-5a0f-4285-b988-3f3bb99181f2" containerName="registry-server" Oct 08 07:29:50 crc kubenswrapper[4693]: I1008 07:29:50.946867 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:50 crc kubenswrapper[4693]: I1008 07:29:50.949854 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7l88q" Oct 08 07:29:50 crc kubenswrapper[4693]: I1008 07:29:50.997004 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn"] Oct 08 07:29:51 crc kubenswrapper[4693]: I1008 07:29:51.057447 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b56656be-6638-40df-b380-d88dbc891f53-bundle\") pod \"b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn\" (UID: \"b56656be-6638-40df-b380-d88dbc891f53\") " pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:51 crc kubenswrapper[4693]: I1008 07:29:51.057668 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzb9n\" (UniqueName: \"kubernetes.io/projected/b56656be-6638-40df-b380-d88dbc891f53-kube-api-access-dzb9n\") pod \"b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn\" (UID: \"b56656be-6638-40df-b380-d88dbc891f53\") " pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:51 crc kubenswrapper[4693]: I1008 07:29:51.057785 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b56656be-6638-40df-b380-d88dbc891f53-util\") pod \"b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn\" (UID: \"b56656be-6638-40df-b380-d88dbc891f53\") " pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:51 crc kubenswrapper[4693]: I1008 07:29:51.159683 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b56656be-6638-40df-b380-d88dbc891f53-bundle\") pod \"b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn\" (UID: \"b56656be-6638-40df-b380-d88dbc891f53\") " pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:51 crc kubenswrapper[4693]: I1008 07:29:51.160210 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzb9n\" (UniqueName: \"kubernetes.io/projected/b56656be-6638-40df-b380-d88dbc891f53-kube-api-access-dzb9n\") pod \"b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn\" (UID: \"b56656be-6638-40df-b380-d88dbc891f53\") " pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:51 crc kubenswrapper[4693]: I1008 07:29:51.160282 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b56656be-6638-40df-b380-d88dbc891f53-util\") pod \"b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn\" (UID: \"b56656be-6638-40df-b380-d88dbc891f53\") " pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:51 crc kubenswrapper[4693]: I1008 07:29:51.160498 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/b56656be-6638-40df-b380-d88dbc891f53-bundle\") pod \"b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn\" (UID: \"b56656be-6638-40df-b380-d88dbc891f53\") " pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:51 crc kubenswrapper[4693]: I1008 07:29:51.160871 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b56656be-6638-40df-b380-d88dbc891f53-util\") pod \"b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn\" (UID: \"b56656be-6638-40df-b380-d88dbc891f53\") " pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:51 crc kubenswrapper[4693]: I1008 07:29:51.195692 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzb9n\" (UniqueName: \"kubernetes.io/projected/b56656be-6638-40df-b380-d88dbc891f53-kube-api-access-dzb9n\") pod \"b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn\" (UID: \"b56656be-6638-40df-b380-d88dbc891f53\") " pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:51 crc kubenswrapper[4693]: I1008 07:29:51.264734 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:51 crc kubenswrapper[4693]: I1008 07:29:51.708990 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn"] Oct 08 07:29:51 crc kubenswrapper[4693]: I1008 07:29:51.786006 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" event={"ID":"b56656be-6638-40df-b380-d88dbc891f53","Type":"ContainerStarted","Data":"9abf19b58f188f5542218e650cf9b3daa88e49cdb5b909d82bda14c932122626"} Oct 08 07:29:52 crc kubenswrapper[4693]: I1008 07:29:52.795296 4693 generic.go:334] "Generic (PLEG): container finished" podID="b56656be-6638-40df-b380-d88dbc891f53" containerID="f5b4648b8c7c5264a3074ffa88271d32be0e76bf7dd6102d7b60719a2c1e3935" exitCode=0 Oct 08 07:29:52 crc kubenswrapper[4693]: I1008 07:29:52.795371 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" event={"ID":"b56656be-6638-40df-b380-d88dbc891f53","Type":"ContainerDied","Data":"f5b4648b8c7c5264a3074ffa88271d32be0e76bf7dd6102d7b60719a2c1e3935"} Oct 08 07:29:53 crc kubenswrapper[4693]: I1008 07:29:53.815345 4693 generic.go:334] "Generic (PLEG): container finished" podID="b56656be-6638-40df-b380-d88dbc891f53" containerID="294ec7f37b61152d8d75b5e55b7d28c3521accf8c252c87642c59f87d826554b" exitCode=0 Oct 08 07:29:53 crc kubenswrapper[4693]: I1008 07:29:53.815394 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" event={"ID":"b56656be-6638-40df-b380-d88dbc891f53","Type":"ContainerDied","Data":"294ec7f37b61152d8d75b5e55b7d28c3521accf8c252c87642c59f87d826554b"} Oct 08 07:29:54 crc kubenswrapper[4693]: I1008 07:29:54.826313 4693 generic.go:334] "Generic (PLEG): container finished" podID="b56656be-6638-40df-b380-d88dbc891f53" containerID="188586f2b1e1c8bf85b82c8881e2aeb9792797dd76d0d57ad74f49bad706777b" exitCode=0 Oct 08 07:29:54 crc kubenswrapper[4693]: I1008 07:29:54.826377 4693 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" event={"ID":"b56656be-6638-40df-b380-d88dbc891f53","Type":"ContainerDied","Data":"188586f2b1e1c8bf85b82c8881e2aeb9792797dd76d0d57ad74f49bad706777b"} Oct 08 07:29:55 crc kubenswrapper[4693]: I1008 07:29:55.856137 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wdmx9"] Oct 08 07:29:55 crc kubenswrapper[4693]: I1008 07:29:55.858045 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:29:55 crc kubenswrapper[4693]: I1008 07:29:55.894652 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wdmx9"] Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.034014 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83f5be17-1056-4b57-9644-aa9271f20ab7-catalog-content\") pod \"redhat-marketplace-wdmx9\" (UID: \"83f5be17-1056-4b57-9644-aa9271f20ab7\") " pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.034080 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59cmn\" (UniqueName: \"kubernetes.io/projected/83f5be17-1056-4b57-9644-aa9271f20ab7-kube-api-access-59cmn\") pod \"redhat-marketplace-wdmx9\" (UID: \"83f5be17-1056-4b57-9644-aa9271f20ab7\") " pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.034149 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83f5be17-1056-4b57-9644-aa9271f20ab7-utilities\") pod \"redhat-marketplace-wdmx9\" (UID: \"83f5be17-1056-4b57-9644-aa9271f20ab7\") " pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.135324 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83f5be17-1056-4b57-9644-aa9271f20ab7-utilities\") pod \"redhat-marketplace-wdmx9\" (UID: \"83f5be17-1056-4b57-9644-aa9271f20ab7\") " pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.135456 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83f5be17-1056-4b57-9644-aa9271f20ab7-catalog-content\") pod \"redhat-marketplace-wdmx9\" (UID: \"83f5be17-1056-4b57-9644-aa9271f20ab7\") " pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.135486 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59cmn\" (UniqueName: \"kubernetes.io/projected/83f5be17-1056-4b57-9644-aa9271f20ab7-kube-api-access-59cmn\") pod \"redhat-marketplace-wdmx9\" (UID: \"83f5be17-1056-4b57-9644-aa9271f20ab7\") " pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.135796 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83f5be17-1056-4b57-9644-aa9271f20ab7-utilities\") pod \"redhat-marketplace-wdmx9\" (UID: 
\"83f5be17-1056-4b57-9644-aa9271f20ab7\") " pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.135991 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83f5be17-1056-4b57-9644-aa9271f20ab7-catalog-content\") pod \"redhat-marketplace-wdmx9\" (UID: \"83f5be17-1056-4b57-9644-aa9271f20ab7\") " pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.159766 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59cmn\" (UniqueName: \"kubernetes.io/projected/83f5be17-1056-4b57-9644-aa9271f20ab7-kube-api-access-59cmn\") pod \"redhat-marketplace-wdmx9\" (UID: \"83f5be17-1056-4b57-9644-aa9271f20ab7\") " pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.181197 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.261670 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.339515 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b56656be-6638-40df-b380-d88dbc891f53-bundle\") pod \"b56656be-6638-40df-b380-d88dbc891f53\" (UID: \"b56656be-6638-40df-b380-d88dbc891f53\") " Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.339641 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b56656be-6638-40df-b380-d88dbc891f53-util\") pod \"b56656be-6638-40df-b380-d88dbc891f53\" (UID: \"b56656be-6638-40df-b380-d88dbc891f53\") " Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.339662 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzb9n\" (UniqueName: \"kubernetes.io/projected/b56656be-6638-40df-b380-d88dbc891f53-kube-api-access-dzb9n\") pod \"b56656be-6638-40df-b380-d88dbc891f53\" (UID: \"b56656be-6638-40df-b380-d88dbc891f53\") " Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.341268 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b56656be-6638-40df-b380-d88dbc891f53-bundle" (OuterVolumeSpecName: "bundle") pod "b56656be-6638-40df-b380-d88dbc891f53" (UID: "b56656be-6638-40df-b380-d88dbc891f53"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.357213 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b56656be-6638-40df-b380-d88dbc891f53-util" (OuterVolumeSpecName: "util") pod "b56656be-6638-40df-b380-d88dbc891f53" (UID: "b56656be-6638-40df-b380-d88dbc891f53"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.358716 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b56656be-6638-40df-b380-d88dbc891f53-kube-api-access-dzb9n" (OuterVolumeSpecName: "kube-api-access-dzb9n") pod "b56656be-6638-40df-b380-d88dbc891f53" (UID: "b56656be-6638-40df-b380-d88dbc891f53"). 
InnerVolumeSpecName "kube-api-access-dzb9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.441003 4693 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b56656be-6638-40df-b380-d88dbc891f53-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.441033 4693 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b56656be-6638-40df-b380-d88dbc891f53-util\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.441043 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzb9n\" (UniqueName: \"kubernetes.io/projected/b56656be-6638-40df-b380-d88dbc891f53-kube-api-access-dzb9n\") on node \"crc\" DevicePath \"\"" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.647030 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wdmx9"] Oct 08 07:29:56 crc kubenswrapper[4693]: W1008 07:29:56.652117 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83f5be17_1056_4b57_9644_aa9271f20ab7.slice/crio-fac5a0ea40c20bca175f782e115fa551036beb94f2b2b5a3dacf36b4d6dfcf0b WatchSource:0}: Error finding container fac5a0ea40c20bca175f782e115fa551036beb94f2b2b5a3dacf36b4d6dfcf0b: Status 404 returned error can't find the container with id fac5a0ea40c20bca175f782e115fa551036beb94f2b2b5a3dacf36b4d6dfcf0b Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.844404 4693 generic.go:334] "Generic (PLEG): container finished" podID="83f5be17-1056-4b57-9644-aa9271f20ab7" containerID="5334c9136d0f08f553950fb96ab29ba8f3a6d779d598137d90ff079c69a86a4b" exitCode=0 Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.844476 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wdmx9" event={"ID":"83f5be17-1056-4b57-9644-aa9271f20ab7","Type":"ContainerDied","Data":"5334c9136d0f08f553950fb96ab29ba8f3a6d779d598137d90ff079c69a86a4b"} Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.845105 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wdmx9" event={"ID":"83f5be17-1056-4b57-9644-aa9271f20ab7","Type":"ContainerStarted","Data":"fac5a0ea40c20bca175f782e115fa551036beb94f2b2b5a3dacf36b4d6dfcf0b"} Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.848194 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" event={"ID":"b56656be-6638-40df-b380-d88dbc891f53","Type":"ContainerDied","Data":"9abf19b58f188f5542218e650cf9b3daa88e49cdb5b909d82bda14c932122626"} Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.848226 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9abf19b58f188f5542218e650cf9b3daa88e49cdb5b909d82bda14c932122626" Oct 08 07:29:56 crc kubenswrapper[4693]: I1008 07:29:56.848307 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn" Oct 08 07:29:57 crc kubenswrapper[4693]: I1008 07:29:57.856514 4693 generic.go:334] "Generic (PLEG): container finished" podID="83f5be17-1056-4b57-9644-aa9271f20ab7" containerID="210c1186e5b347e71b1dd48be9ff77176eed7a30fa631aec99dabf1e6b1f99a9" exitCode=0 Oct 08 07:29:57 crc kubenswrapper[4693]: I1008 07:29:57.856653 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wdmx9" event={"ID":"83f5be17-1056-4b57-9644-aa9271f20ab7","Type":"ContainerDied","Data":"210c1186e5b347e71b1dd48be9ff77176eed7a30fa631aec99dabf1e6b1f99a9"} Oct 08 07:29:58 crc kubenswrapper[4693]: I1008 07:29:58.868585 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wdmx9" event={"ID":"83f5be17-1056-4b57-9644-aa9271f20ab7","Type":"ContainerStarted","Data":"640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3"} Oct 08 07:29:58 crc kubenswrapper[4693]: I1008 07:29:58.899743 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wdmx9" podStartSLOduration=2.335120042 podStartE2EDuration="3.899714293s" podCreationTimestamp="2025-10-08 07:29:55 +0000 UTC" firstStartedPulling="2025-10-08 07:29:56.846420776 +0000 UTC m=+782.217385711" lastFinishedPulling="2025-10-08 07:29:58.411015017 +0000 UTC m=+783.781979962" observedRunningTime="2025-10-08 07:29:58.891805563 +0000 UTC m=+784.262770558" watchObservedRunningTime="2025-10-08 07:29:58.899714293 +0000 UTC m=+784.270679268" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.173364 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m"] Oct 08 07:30:00 crc kubenswrapper[4693]: E1008 07:30:00.174300 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b56656be-6638-40df-b380-d88dbc891f53" containerName="util" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.174331 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b56656be-6638-40df-b380-d88dbc891f53" containerName="util" Oct 08 07:30:00 crc kubenswrapper[4693]: E1008 07:30:00.174563 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b56656be-6638-40df-b380-d88dbc891f53" containerName="extract" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.174597 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b56656be-6638-40df-b380-d88dbc891f53" containerName="extract" Oct 08 07:30:00 crc kubenswrapper[4693]: E1008 07:30:00.174707 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b56656be-6638-40df-b380-d88dbc891f53" containerName="pull" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.174729 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b56656be-6638-40df-b380-d88dbc891f53" containerName="pull" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.176243 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b56656be-6638-40df-b380-d88dbc891f53" containerName="extract" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.176803 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.180869 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.181009 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.184958 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m"] Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.294973 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb9x8\" (UniqueName: \"kubernetes.io/projected/2c332e0a-56dc-480e-a857-62665c11bbf9-kube-api-access-mb9x8\") pod \"collect-profiles-29331810-cln5m\" (UID: \"2c332e0a-56dc-480e-a857-62665c11bbf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.295024 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c332e0a-56dc-480e-a857-62665c11bbf9-config-volume\") pod \"collect-profiles-29331810-cln5m\" (UID: \"2c332e0a-56dc-480e-a857-62665c11bbf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.295130 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c332e0a-56dc-480e-a857-62665c11bbf9-secret-volume\") pod \"collect-profiles-29331810-cln5m\" (UID: \"2c332e0a-56dc-480e-a857-62665c11bbf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.396013 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb9x8\" (UniqueName: \"kubernetes.io/projected/2c332e0a-56dc-480e-a857-62665c11bbf9-kube-api-access-mb9x8\") pod \"collect-profiles-29331810-cln5m\" (UID: \"2c332e0a-56dc-480e-a857-62665c11bbf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.396064 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c332e0a-56dc-480e-a857-62665c11bbf9-config-volume\") pod \"collect-profiles-29331810-cln5m\" (UID: \"2c332e0a-56dc-480e-a857-62665c11bbf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.396158 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c332e0a-56dc-480e-a857-62665c11bbf9-secret-volume\") pod \"collect-profiles-29331810-cln5m\" (UID: \"2c332e0a-56dc-480e-a857-62665c11bbf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.398043 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c332e0a-56dc-480e-a857-62665c11bbf9-config-volume\") pod 
\"collect-profiles-29331810-cln5m\" (UID: \"2c332e0a-56dc-480e-a857-62665c11bbf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.406964 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c332e0a-56dc-480e-a857-62665c11bbf9-secret-volume\") pod \"collect-profiles-29331810-cln5m\" (UID: \"2c332e0a-56dc-480e-a857-62665c11bbf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.435182 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb9x8\" (UniqueName: \"kubernetes.io/projected/2c332e0a-56dc-480e-a857-62665c11bbf9-kube-api-access-mb9x8\") pod \"collect-profiles-29331810-cln5m\" (UID: \"2c332e0a-56dc-480e-a857-62665c11bbf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.501217 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:00 crc kubenswrapper[4693]: I1008 07:30:00.987980 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m"] Oct 08 07:30:01 crc kubenswrapper[4693]: I1008 07:30:01.792281 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb"] Oct 08 07:30:01 crc kubenswrapper[4693]: I1008 07:30:01.793730 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb" Oct 08 07:30:01 crc kubenswrapper[4693]: I1008 07:30:01.796189 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-gzdcs" Oct 08 07:30:01 crc kubenswrapper[4693]: I1008 07:30:01.839649 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb"] Oct 08 07:30:01 crc kubenswrapper[4693]: I1008 07:30:01.887769 4693 generic.go:334] "Generic (PLEG): container finished" podID="2c332e0a-56dc-480e-a857-62665c11bbf9" containerID="9ea1368c3b1f4f4f6f0e7564adfd19cf07011c2063d4f1491826a5438e1f11dc" exitCode=0 Oct 08 07:30:01 crc kubenswrapper[4693]: I1008 07:30:01.887813 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" event={"ID":"2c332e0a-56dc-480e-a857-62665c11bbf9","Type":"ContainerDied","Data":"9ea1368c3b1f4f4f6f0e7564adfd19cf07011c2063d4f1491826a5438e1f11dc"} Oct 08 07:30:01 crc kubenswrapper[4693]: I1008 07:30:01.887862 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" event={"ID":"2c332e0a-56dc-480e-a857-62665c11bbf9","Type":"ContainerStarted","Data":"c5738270a385aa3c857d5bfd07838d78f7167d461bf545ab840a18843f706fdc"} Oct 08 07:30:01 crc kubenswrapper[4693]: I1008 07:30:01.922153 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqzn4\" (UniqueName: \"kubernetes.io/projected/23996d41-f11a-4a8a-8a71-3e7f93978efc-kube-api-access-cqzn4\") pod \"openstack-operator-controller-operator-77c8f66d44-864bb\" (UID: 
\"23996d41-f11a-4a8a-8a71-3e7f93978efc\") " pod="openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb" Oct 08 07:30:02 crc kubenswrapper[4693]: I1008 07:30:02.022901 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqzn4\" (UniqueName: \"kubernetes.io/projected/23996d41-f11a-4a8a-8a71-3e7f93978efc-kube-api-access-cqzn4\") pod \"openstack-operator-controller-operator-77c8f66d44-864bb\" (UID: \"23996d41-f11a-4a8a-8a71-3e7f93978efc\") " pod="openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb" Oct 08 07:30:02 crc kubenswrapper[4693]: I1008 07:30:02.046164 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqzn4\" (UniqueName: \"kubernetes.io/projected/23996d41-f11a-4a8a-8a71-3e7f93978efc-kube-api-access-cqzn4\") pod \"openstack-operator-controller-operator-77c8f66d44-864bb\" (UID: \"23996d41-f11a-4a8a-8a71-3e7f93978efc\") " pod="openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb" Oct 08 07:30:02 crc kubenswrapper[4693]: I1008 07:30:02.113017 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb" Oct 08 07:30:02 crc kubenswrapper[4693]: I1008 07:30:02.573032 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb"] Oct 08 07:30:02 crc kubenswrapper[4693]: I1008 07:30:02.895941 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb" event={"ID":"23996d41-f11a-4a8a-8a71-3e7f93978efc","Type":"ContainerStarted","Data":"898ac4072e29f5ffc630c08bbae8d25dd8e401420b80a00c9f52e2d5ce6a1b29"} Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.157596 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.241716 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c332e0a-56dc-480e-a857-62665c11bbf9-secret-volume\") pod \"2c332e0a-56dc-480e-a857-62665c11bbf9\" (UID: \"2c332e0a-56dc-480e-a857-62665c11bbf9\") " Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.242102 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mb9x8\" (UniqueName: \"kubernetes.io/projected/2c332e0a-56dc-480e-a857-62665c11bbf9-kube-api-access-mb9x8\") pod \"2c332e0a-56dc-480e-a857-62665c11bbf9\" (UID: \"2c332e0a-56dc-480e-a857-62665c11bbf9\") " Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.242143 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c332e0a-56dc-480e-a857-62665c11bbf9-config-volume\") pod \"2c332e0a-56dc-480e-a857-62665c11bbf9\" (UID: \"2c332e0a-56dc-480e-a857-62665c11bbf9\") " Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.243041 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c332e0a-56dc-480e-a857-62665c11bbf9-config-volume" (OuterVolumeSpecName: "config-volume") pod "2c332e0a-56dc-480e-a857-62665c11bbf9" (UID: "2c332e0a-56dc-480e-a857-62665c11bbf9"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.249005 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c332e0a-56dc-480e-a857-62665c11bbf9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2c332e0a-56dc-480e-a857-62665c11bbf9" (UID: "2c332e0a-56dc-480e-a857-62665c11bbf9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.249853 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c332e0a-56dc-480e-a857-62665c11bbf9-kube-api-access-mb9x8" (OuterVolumeSpecName: "kube-api-access-mb9x8") pod "2c332e0a-56dc-480e-a857-62665c11bbf9" (UID: "2c332e0a-56dc-480e-a857-62665c11bbf9"). InnerVolumeSpecName "kube-api-access-mb9x8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.343572 4693 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c332e0a-56dc-480e-a857-62665c11bbf9-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.343604 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mb9x8\" (UniqueName: \"kubernetes.io/projected/2c332e0a-56dc-480e-a857-62665c11bbf9-kube-api-access-mb9x8\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.343613 4693 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c332e0a-56dc-480e-a857-62665c11bbf9-config-volume\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.903468 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" event={"ID":"2c332e0a-56dc-480e-a857-62665c11bbf9","Type":"ContainerDied","Data":"c5738270a385aa3c857d5bfd07838d78f7167d461bf545ab840a18843f706fdc"} Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.903506 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5738270a385aa3c857d5bfd07838d78f7167d461bf545ab840a18843f706fdc" Oct 08 07:30:03 crc kubenswrapper[4693]: I1008 07:30:03.903560 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m" Oct 08 07:30:06 crc kubenswrapper[4693]: I1008 07:30:06.182127 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:30:06 crc kubenswrapper[4693]: I1008 07:30:06.182785 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:30:06 crc kubenswrapper[4693]: I1008 07:30:06.242001 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:30:06 crc kubenswrapper[4693]: I1008 07:30:06.924246 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb" event={"ID":"23996d41-f11a-4a8a-8a71-3e7f93978efc","Type":"ContainerStarted","Data":"86ffe2c448a94246a30ae54fd74dc23b22e518cbf878a3993295db380834431d"} Oct 08 07:30:06 crc kubenswrapper[4693]: I1008 07:30:06.981303 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:30:08 crc kubenswrapper[4693]: I1008 07:30:08.633387 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wdmx9"] Oct 08 07:30:09 crc kubenswrapper[4693]: I1008 07:30:09.961506 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wdmx9" podUID="83f5be17-1056-4b57-9644-aa9271f20ab7" containerName="registry-server" containerID="cri-o://640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3" gracePeriod=2 Oct 08 07:30:09 crc kubenswrapper[4693]: I1008 07:30:09.962272 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb" event={"ID":"23996d41-f11a-4a8a-8a71-3e7f93978efc","Type":"ContainerStarted","Data":"5e102d6dd8e40f8241d8c5c1f5e4798cac2029b8913042e3d4e223f4c97813a2"} Oct 08 07:30:09 crc kubenswrapper[4693]: I1008 07:30:09.962297 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb" Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.018535 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb" podStartSLOduration=2.649769599 podStartE2EDuration="9.018505622s" podCreationTimestamp="2025-10-08 07:30:01 +0000 UTC" firstStartedPulling="2025-10-08 07:30:02.586428797 +0000 UTC m=+787.957393742" lastFinishedPulling="2025-10-08 07:30:08.95516482 +0000 UTC m=+794.326129765" observedRunningTime="2025-10-08 07:30:10.016024497 +0000 UTC m=+795.386989472" watchObservedRunningTime="2025-10-08 07:30:10.018505622 +0000 UTC m=+795.389470597" Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.453084 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.550098 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59cmn\" (UniqueName: \"kubernetes.io/projected/83f5be17-1056-4b57-9644-aa9271f20ab7-kube-api-access-59cmn\") pod \"83f5be17-1056-4b57-9644-aa9271f20ab7\" (UID: \"83f5be17-1056-4b57-9644-aa9271f20ab7\") " Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.550200 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83f5be17-1056-4b57-9644-aa9271f20ab7-catalog-content\") pod \"83f5be17-1056-4b57-9644-aa9271f20ab7\" (UID: \"83f5be17-1056-4b57-9644-aa9271f20ab7\") " Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.550275 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83f5be17-1056-4b57-9644-aa9271f20ab7-utilities\") pod \"83f5be17-1056-4b57-9644-aa9271f20ab7\" (UID: \"83f5be17-1056-4b57-9644-aa9271f20ab7\") " Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.551966 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83f5be17-1056-4b57-9644-aa9271f20ab7-utilities" (OuterVolumeSpecName: "utilities") pod "83f5be17-1056-4b57-9644-aa9271f20ab7" (UID: "83f5be17-1056-4b57-9644-aa9271f20ab7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.556459 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83f5be17-1056-4b57-9644-aa9271f20ab7-kube-api-access-59cmn" (OuterVolumeSpecName: "kube-api-access-59cmn") pod "83f5be17-1056-4b57-9644-aa9271f20ab7" (UID: "83f5be17-1056-4b57-9644-aa9271f20ab7"). InnerVolumeSpecName "kube-api-access-59cmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.568746 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83f5be17-1056-4b57-9644-aa9271f20ab7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "83f5be17-1056-4b57-9644-aa9271f20ab7" (UID: "83f5be17-1056-4b57-9644-aa9271f20ab7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.651561 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83f5be17-1056-4b57-9644-aa9271f20ab7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.651610 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83f5be17-1056-4b57-9644-aa9271f20ab7-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.651710 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59cmn\" (UniqueName: \"kubernetes.io/projected/83f5be17-1056-4b57-9644-aa9271f20ab7-kube-api-access-59cmn\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.972471 4693 generic.go:334] "Generic (PLEG): container finished" podID="83f5be17-1056-4b57-9644-aa9271f20ab7" containerID="640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3" exitCode=0 Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.972531 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wdmx9" event={"ID":"83f5be17-1056-4b57-9644-aa9271f20ab7","Type":"ContainerDied","Data":"640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3"} Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.972583 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wdmx9" Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.972590 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wdmx9" event={"ID":"83f5be17-1056-4b57-9644-aa9271f20ab7","Type":"ContainerDied","Data":"fac5a0ea40c20bca175f782e115fa551036beb94f2b2b5a3dacf36b4d6dfcf0b"} Oct 08 07:30:10 crc kubenswrapper[4693]: I1008 07:30:10.972605 4693 scope.go:117] "RemoveContainer" containerID="640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3" Oct 08 07:30:11 crc kubenswrapper[4693]: I1008 07:30:11.004133 4693 scope.go:117] "RemoveContainer" containerID="210c1186e5b347e71b1dd48be9ff77176eed7a30fa631aec99dabf1e6b1f99a9" Oct 08 07:30:11 crc kubenswrapper[4693]: I1008 07:30:11.027544 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wdmx9"] Oct 08 07:30:11 crc kubenswrapper[4693]: I1008 07:30:11.036931 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wdmx9"] Oct 08 07:30:11 crc kubenswrapper[4693]: I1008 07:30:11.039092 4693 scope.go:117] "RemoveContainer" containerID="5334c9136d0f08f553950fb96ab29ba8f3a6d779d598137d90ff079c69a86a4b" Oct 08 07:30:11 crc kubenswrapper[4693]: I1008 07:30:11.074603 4693 scope.go:117] "RemoveContainer" containerID="640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3" Oct 08 07:30:11 crc kubenswrapper[4693]: E1008 07:30:11.075012 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3\": container with ID starting with 640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3 not found: ID does not exist" containerID="640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3" Oct 08 07:30:11 crc kubenswrapper[4693]: I1008 07:30:11.075034 4693 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3"} err="failed to get container status \"640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3\": rpc error: code = NotFound desc = could not find container \"640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3\": container with ID starting with 640ecaace997e0cf60ebb526516b7ddad12cc3fb7a56ba596f363ac9e3aa83d3 not found: ID does not exist" Oct 08 07:30:11 crc kubenswrapper[4693]: I1008 07:30:11.075052 4693 scope.go:117] "RemoveContainer" containerID="210c1186e5b347e71b1dd48be9ff77176eed7a30fa631aec99dabf1e6b1f99a9" Oct 08 07:30:11 crc kubenswrapper[4693]: E1008 07:30:11.075288 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"210c1186e5b347e71b1dd48be9ff77176eed7a30fa631aec99dabf1e6b1f99a9\": container with ID starting with 210c1186e5b347e71b1dd48be9ff77176eed7a30fa631aec99dabf1e6b1f99a9 not found: ID does not exist" containerID="210c1186e5b347e71b1dd48be9ff77176eed7a30fa631aec99dabf1e6b1f99a9" Oct 08 07:30:11 crc kubenswrapper[4693]: I1008 07:30:11.075302 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"210c1186e5b347e71b1dd48be9ff77176eed7a30fa631aec99dabf1e6b1f99a9"} err="failed to get container status \"210c1186e5b347e71b1dd48be9ff77176eed7a30fa631aec99dabf1e6b1f99a9\": rpc error: code = NotFound desc = could not find container \"210c1186e5b347e71b1dd48be9ff77176eed7a30fa631aec99dabf1e6b1f99a9\": container with ID starting with 210c1186e5b347e71b1dd48be9ff77176eed7a30fa631aec99dabf1e6b1f99a9 not found: ID does not exist" Oct 08 07:30:11 crc kubenswrapper[4693]: I1008 07:30:11.075314 4693 scope.go:117] "RemoveContainer" containerID="5334c9136d0f08f553950fb96ab29ba8f3a6d779d598137d90ff079c69a86a4b" Oct 08 07:30:11 crc kubenswrapper[4693]: E1008 07:30:11.075678 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5334c9136d0f08f553950fb96ab29ba8f3a6d779d598137d90ff079c69a86a4b\": container with ID starting with 5334c9136d0f08f553950fb96ab29ba8f3a6d779d598137d90ff079c69a86a4b not found: ID does not exist" containerID="5334c9136d0f08f553950fb96ab29ba8f3a6d779d598137d90ff079c69a86a4b" Oct 08 07:30:11 crc kubenswrapper[4693]: I1008 07:30:11.075695 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5334c9136d0f08f553950fb96ab29ba8f3a6d779d598137d90ff079c69a86a4b"} err="failed to get container status \"5334c9136d0f08f553950fb96ab29ba8f3a6d779d598137d90ff079c69a86a4b\": rpc error: code = NotFound desc = could not find container \"5334c9136d0f08f553950fb96ab29ba8f3a6d779d598137d90ff079c69a86a4b\": container with ID starting with 5334c9136d0f08f553950fb96ab29ba8f3a6d779d598137d90ff079c69a86a4b not found: ID does not exist" Oct 08 07:30:11 crc kubenswrapper[4693]: I1008 07:30:11.378971 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83f5be17-1056-4b57-9644-aa9271f20ab7" path="/var/lib/kubelet/pods/83f5be17-1056-4b57-9644-aa9271f20ab7/volumes" Oct 08 07:30:12 crc kubenswrapper[4693]: I1008 07:30:12.116670 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-77c8f66d44-864bb" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.641649 4693 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d8lh8"] Oct 08 07:30:13 crc kubenswrapper[4693]: E1008 07:30:13.642051 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83f5be17-1056-4b57-9644-aa9271f20ab7" containerName="extract-utilities" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.642074 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="83f5be17-1056-4b57-9644-aa9271f20ab7" containerName="extract-utilities" Oct 08 07:30:13 crc kubenswrapper[4693]: E1008 07:30:13.642105 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83f5be17-1056-4b57-9644-aa9271f20ab7" containerName="registry-server" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.642119 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="83f5be17-1056-4b57-9644-aa9271f20ab7" containerName="registry-server" Oct 08 07:30:13 crc kubenswrapper[4693]: E1008 07:30:13.642142 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c332e0a-56dc-480e-a857-62665c11bbf9" containerName="collect-profiles" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.642155 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c332e0a-56dc-480e-a857-62665c11bbf9" containerName="collect-profiles" Oct 08 07:30:13 crc kubenswrapper[4693]: E1008 07:30:13.642173 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83f5be17-1056-4b57-9644-aa9271f20ab7" containerName="extract-content" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.642186 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="83f5be17-1056-4b57-9644-aa9271f20ab7" containerName="extract-content" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.642374 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="83f5be17-1056-4b57-9644-aa9271f20ab7" containerName="registry-server" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.642408 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c332e0a-56dc-480e-a857-62665c11bbf9" containerName="collect-profiles" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.643921 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.659456 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d8lh8"] Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.694333 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vztj\" (UniqueName: \"kubernetes.io/projected/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-kube-api-access-6vztj\") pod \"community-operators-d8lh8\" (UID: \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\") " pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.694401 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-catalog-content\") pod \"community-operators-d8lh8\" (UID: \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\") " pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.694489 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-utilities\") pod \"community-operators-d8lh8\" (UID: \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\") " pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.795505 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-utilities\") pod \"community-operators-d8lh8\" (UID: \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\") " pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.795559 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vztj\" (UniqueName: \"kubernetes.io/projected/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-kube-api-access-6vztj\") pod \"community-operators-d8lh8\" (UID: \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\") " pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.795586 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-catalog-content\") pod \"community-operators-d8lh8\" (UID: \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\") " pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.795982 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-catalog-content\") pod \"community-operators-d8lh8\" (UID: \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\") " pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.796130 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-utilities\") pod \"community-operators-d8lh8\" (UID: \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\") " pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:13 crc kubenswrapper[4693]: I1008 07:30:13.817378 4693 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6vztj\" (UniqueName: \"kubernetes.io/projected/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-kube-api-access-6vztj\") pod \"community-operators-d8lh8\" (UID: \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\") " pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:14 crc kubenswrapper[4693]: I1008 07:30:14.006334 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:14 crc kubenswrapper[4693]: I1008 07:30:14.460958 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d8lh8"] Oct 08 07:30:15 crc kubenswrapper[4693]: I1008 07:30:15.004265 4693 generic.go:334] "Generic (PLEG): container finished" podID="83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" containerID="05e6b9ed33c43c1db79230d5c93870d322c5db8ad7db3e9159faee9632efbc0f" exitCode=0 Oct 08 07:30:15 crc kubenswrapper[4693]: I1008 07:30:15.004356 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8lh8" event={"ID":"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0","Type":"ContainerDied","Data":"05e6b9ed33c43c1db79230d5c93870d322c5db8ad7db3e9159faee9632efbc0f"} Oct 08 07:30:15 crc kubenswrapper[4693]: I1008 07:30:15.004608 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8lh8" event={"ID":"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0","Type":"ContainerStarted","Data":"10d4959e42bc353f387ba4e16052c8b5d6b181786c0fe9761c2f98a36de8d313"} Oct 08 07:30:16 crc kubenswrapper[4693]: I1008 07:30:16.016461 4693 generic.go:334] "Generic (PLEG): container finished" podID="83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" containerID="dbc355a4120ca6ce78be8bb9dd06d51d70f42f712f38857598edf4e0499746ab" exitCode=0 Oct 08 07:30:16 crc kubenswrapper[4693]: I1008 07:30:16.016569 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8lh8" event={"ID":"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0","Type":"ContainerDied","Data":"dbc355a4120ca6ce78be8bb9dd06d51d70f42f712f38857598edf4e0499746ab"} Oct 08 07:30:17 crc kubenswrapper[4693]: I1008 07:30:17.028184 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8lh8" event={"ID":"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0","Type":"ContainerStarted","Data":"8ba0e3f3727200bb062d08832c9b89e7a39966492abc8d899523d02148d72781"} Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.582149 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d8lh8" podStartSLOduration=8.07729579 podStartE2EDuration="9.582117596s" podCreationTimestamp="2025-10-08 07:30:13 +0000 UTC" firstStartedPulling="2025-10-08 07:30:15.006427359 +0000 UTC m=+800.377392334" lastFinishedPulling="2025-10-08 07:30:16.511249195 +0000 UTC m=+801.882214140" observedRunningTime="2025-10-08 07:30:17.050167613 +0000 UTC m=+802.421132588" watchObservedRunningTime="2025-10-08 07:30:22.582117596 +0000 UTC m=+807.953082571" Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.584325 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nbtjc"] Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.586877 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.616999 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nbtjc"] Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.723133 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-utilities\") pod \"redhat-operators-nbtjc\" (UID: \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\") " pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.723310 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-catalog-content\") pod \"redhat-operators-nbtjc\" (UID: \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\") " pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.723355 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhgxb\" (UniqueName: \"kubernetes.io/projected/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-kube-api-access-rhgxb\") pod \"redhat-operators-nbtjc\" (UID: \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\") " pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.824498 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-catalog-content\") pod \"redhat-operators-nbtjc\" (UID: \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\") " pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.824561 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhgxb\" (UniqueName: \"kubernetes.io/projected/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-kube-api-access-rhgxb\") pod \"redhat-operators-nbtjc\" (UID: \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\") " pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.824604 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-utilities\") pod \"redhat-operators-nbtjc\" (UID: \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\") " pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.825163 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-utilities\") pod \"redhat-operators-nbtjc\" (UID: \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\") " pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.825397 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-catalog-content\") pod \"redhat-operators-nbtjc\" (UID: \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\") " pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.858801 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-rhgxb\" (UniqueName: \"kubernetes.io/projected/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-kube-api-access-rhgxb\") pod \"redhat-operators-nbtjc\" (UID: \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\") " pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:22 crc kubenswrapper[4693]: I1008 07:30:22.921289 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:23 crc kubenswrapper[4693]: I1008 07:30:23.348745 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nbtjc"] Oct 08 07:30:23 crc kubenswrapper[4693]: W1008 07:30:23.351836 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd15ab2f2_caa2_4ee2_bdcb_27e69de97310.slice/crio-51862fff1c0862ef344a1ed21d8fb94ac1b5d5c0b01727517f6f364831c78604 WatchSource:0}: Error finding container 51862fff1c0862ef344a1ed21d8fb94ac1b5d5c0b01727517f6f364831c78604: Status 404 returned error can't find the container with id 51862fff1c0862ef344a1ed21d8fb94ac1b5d5c0b01727517f6f364831c78604 Oct 08 07:30:24 crc kubenswrapper[4693]: I1008 07:30:24.007163 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:24 crc kubenswrapper[4693]: I1008 07:30:24.007377 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:24 crc kubenswrapper[4693]: I1008 07:30:24.051756 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:24 crc kubenswrapper[4693]: I1008 07:30:24.097584 4693 generic.go:334] "Generic (PLEG): container finished" podID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" containerID="591cb3d92700af9e74d4c073ba331c0c3883e271213a02e410914042c882a3b9" exitCode=0 Oct 08 07:30:24 crc kubenswrapper[4693]: I1008 07:30:24.097656 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbtjc" event={"ID":"d15ab2f2-caa2-4ee2-bdcb-27e69de97310","Type":"ContainerDied","Data":"591cb3d92700af9e74d4c073ba331c0c3883e271213a02e410914042c882a3b9"} Oct 08 07:30:24 crc kubenswrapper[4693]: I1008 07:30:24.097712 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbtjc" event={"ID":"d15ab2f2-caa2-4ee2-bdcb-27e69de97310","Type":"ContainerStarted","Data":"51862fff1c0862ef344a1ed21d8fb94ac1b5d5c0b01727517f6f364831c78604"} Oct 08 07:30:24 crc kubenswrapper[4693]: I1008 07:30:24.149665 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:26 crc kubenswrapper[4693]: I1008 07:30:26.116883 4693 generic.go:334] "Generic (PLEG): container finished" podID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" containerID="c0be85c40c4256608e80c653b7de6d56423bc5c39a0cee58324177bf3864a571" exitCode=0 Oct 08 07:30:26 crc kubenswrapper[4693]: I1008 07:30:26.116989 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbtjc" event={"ID":"d15ab2f2-caa2-4ee2-bdcb-27e69de97310","Type":"ContainerDied","Data":"c0be85c40c4256608e80c653b7de6d56423bc5c39a0cee58324177bf3864a571"} Oct 08 07:30:26 crc kubenswrapper[4693]: I1008 07:30:26.357057 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/community-operators-d8lh8"] Oct 08 07:30:26 crc kubenswrapper[4693]: I1008 07:30:26.357436 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d8lh8" podUID="83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" containerName="registry-server" containerID="cri-o://8ba0e3f3727200bb062d08832c9b89e7a39966492abc8d899523d02148d72781" gracePeriod=2 Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.125874 4693 generic.go:334] "Generic (PLEG): container finished" podID="83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" containerID="8ba0e3f3727200bb062d08832c9b89e7a39966492abc8d899523d02148d72781" exitCode=0 Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.125925 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8lh8" event={"ID":"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0","Type":"ContainerDied","Data":"8ba0e3f3727200bb062d08832c9b89e7a39966492abc8d899523d02148d72781"} Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.129943 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbtjc" event={"ID":"d15ab2f2-caa2-4ee2-bdcb-27e69de97310","Type":"ContainerStarted","Data":"0bd4e73f38b8d37dd3b96a77aaca85f8e93bc037f77e2c3a6aaf946ff8a386f2"} Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.169711 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nbtjc" podStartSLOduration=2.515098051 podStartE2EDuration="5.16968593s" podCreationTimestamp="2025-10-08 07:30:22 +0000 UTC" firstStartedPulling="2025-10-08 07:30:24.099928586 +0000 UTC m=+809.470893531" lastFinishedPulling="2025-10-08 07:30:26.754516435 +0000 UTC m=+812.125481410" observedRunningTime="2025-10-08 07:30:27.155488454 +0000 UTC m=+812.526453419" watchObservedRunningTime="2025-10-08 07:30:27.16968593 +0000 UTC m=+812.540650875" Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.330353 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.388046 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vztj\" (UniqueName: \"kubernetes.io/projected/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-kube-api-access-6vztj\") pod \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\" (UID: \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\") " Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.388616 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-utilities\") pod \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\" (UID: \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\") " Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.388718 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-catalog-content\") pod \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\" (UID: \"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0\") " Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.389738 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-utilities" (OuterVolumeSpecName: "utilities") pod "83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" (UID: "83d9be5a-a79a-46f8-a2c8-58bfee3df5c0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.396224 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-kube-api-access-6vztj" (OuterVolumeSpecName: "kube-api-access-6vztj") pod "83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" (UID: "83d9be5a-a79a-46f8-a2c8-58bfee3df5c0"). InnerVolumeSpecName "kube-api-access-6vztj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.446597 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" (UID: "83d9be5a-a79a-46f8-a2c8-58bfee3df5c0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.491233 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vztj\" (UniqueName: \"kubernetes.io/projected/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-kube-api-access-6vztj\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.491256 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:27 crc kubenswrapper[4693]: I1008 07:30:27.491311 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:28 crc kubenswrapper[4693]: I1008 07:30:28.141473 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d8lh8" Oct 08 07:30:28 crc kubenswrapper[4693]: I1008 07:30:28.141533 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8lh8" event={"ID":"83d9be5a-a79a-46f8-a2c8-58bfee3df5c0","Type":"ContainerDied","Data":"10d4959e42bc353f387ba4e16052c8b5d6b181786c0fe9761c2f98a36de8d313"} Oct 08 07:30:28 crc kubenswrapper[4693]: I1008 07:30:28.141577 4693 scope.go:117] "RemoveContainer" containerID="8ba0e3f3727200bb062d08832c9b89e7a39966492abc8d899523d02148d72781" Oct 08 07:30:28 crc kubenswrapper[4693]: I1008 07:30:28.179942 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d8lh8"] Oct 08 07:30:28 crc kubenswrapper[4693]: I1008 07:30:28.182707 4693 scope.go:117] "RemoveContainer" containerID="dbc355a4120ca6ce78be8bb9dd06d51d70f42f712f38857598edf4e0499746ab" Oct 08 07:30:28 crc kubenswrapper[4693]: I1008 07:30:28.187230 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d8lh8"] Oct 08 07:30:28 crc kubenswrapper[4693]: I1008 07:30:28.209984 4693 scope.go:117] "RemoveContainer" containerID="05e6b9ed33c43c1db79230d5c93870d322c5db8ad7db3e9159faee9632efbc0f" Oct 08 07:30:29 crc kubenswrapper[4693]: I1008 07:30:29.393342 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" path="/var/lib/kubelet/pods/83d9be5a-a79a-46f8-a2c8-58bfee3df5c0/volumes" Oct 08 07:30:32 crc kubenswrapper[4693]: I1008 07:30:32.921791 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:32 crc kubenswrapper[4693]: I1008 07:30:32.921906 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:33 crc kubenswrapper[4693]: I1008 07:30:33.980242 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nbtjc" podUID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" containerName="registry-server" probeResult="failure" output=< Oct 08 07:30:33 crc kubenswrapper[4693]: timeout: failed to connect service ":50051" within 1s Oct 08 07:30:33 crc kubenswrapper[4693]: > Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.028163 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dzwl8"] Oct 08 07:30:38 crc kubenswrapper[4693]: E1008 07:30:38.028923 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" containerName="extract-content" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.028937 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" containerName="extract-content" Oct 08 07:30:38 crc kubenswrapper[4693]: E1008 07:30:38.028948 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" containerName="extract-utilities" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.028954 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" containerName="extract-utilities" Oct 08 07:30:38 crc kubenswrapper[4693]: E1008 07:30:38.028974 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" containerName="registry-server" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 
07:30:38.028983 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" containerName="registry-server" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.029106 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="83d9be5a-a79a-46f8-a2c8-58bfee3df5c0" containerName="registry-server" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.029933 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.045352 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dzwl8"] Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.144398 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7vwb\" (UniqueName: \"kubernetes.io/projected/17985107-f4e4-4b3d-97d0-640d4d7134ea-kube-api-access-b7vwb\") pod \"certified-operators-dzwl8\" (UID: \"17985107-f4e4-4b3d-97d0-640d4d7134ea\") " pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.144707 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17985107-f4e4-4b3d-97d0-640d4d7134ea-utilities\") pod \"certified-operators-dzwl8\" (UID: \"17985107-f4e4-4b3d-97d0-640d4d7134ea\") " pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.144870 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17985107-f4e4-4b3d-97d0-640d4d7134ea-catalog-content\") pod \"certified-operators-dzwl8\" (UID: \"17985107-f4e4-4b3d-97d0-640d4d7134ea\") " pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.245807 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7vwb\" (UniqueName: \"kubernetes.io/projected/17985107-f4e4-4b3d-97d0-640d4d7134ea-kube-api-access-b7vwb\") pod \"certified-operators-dzwl8\" (UID: \"17985107-f4e4-4b3d-97d0-640d4d7134ea\") " pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.246116 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17985107-f4e4-4b3d-97d0-640d4d7134ea-utilities\") pod \"certified-operators-dzwl8\" (UID: \"17985107-f4e4-4b3d-97d0-640d4d7134ea\") " pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.246253 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17985107-f4e4-4b3d-97d0-640d4d7134ea-catalog-content\") pod \"certified-operators-dzwl8\" (UID: \"17985107-f4e4-4b3d-97d0-640d4d7134ea\") " pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.246657 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17985107-f4e4-4b3d-97d0-640d4d7134ea-utilities\") pod \"certified-operators-dzwl8\" (UID: \"17985107-f4e4-4b3d-97d0-640d4d7134ea\") " pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:38 crc 
kubenswrapper[4693]: I1008 07:30:38.246891 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17985107-f4e4-4b3d-97d0-640d4d7134ea-catalog-content\") pod \"certified-operators-dzwl8\" (UID: \"17985107-f4e4-4b3d-97d0-640d4d7134ea\") " pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.265105 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7vwb\" (UniqueName: \"kubernetes.io/projected/17985107-f4e4-4b3d-97d0-640d4d7134ea-kube-api-access-b7vwb\") pod \"certified-operators-dzwl8\" (UID: \"17985107-f4e4-4b3d-97d0-640d4d7134ea\") " pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.343570 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:38 crc kubenswrapper[4693]: I1008 07:30:38.681353 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dzwl8"] Oct 08 07:30:39 crc kubenswrapper[4693]: I1008 07:30:39.227777 4693 generic.go:334] "Generic (PLEG): container finished" podID="17985107-f4e4-4b3d-97d0-640d4d7134ea" containerID="2b12cf3fb62fcbe4524754620526ed05985d9505931acf9495733dd1ce721ff9" exitCode=0 Oct 08 07:30:39 crc kubenswrapper[4693]: I1008 07:30:39.228067 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzwl8" event={"ID":"17985107-f4e4-4b3d-97d0-640d4d7134ea","Type":"ContainerDied","Data":"2b12cf3fb62fcbe4524754620526ed05985d9505931acf9495733dd1ce721ff9"} Oct 08 07:30:39 crc kubenswrapper[4693]: I1008 07:30:39.228095 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzwl8" event={"ID":"17985107-f4e4-4b3d-97d0-640d4d7134ea","Type":"ContainerStarted","Data":"7bc21121178d90fbb24aae54bd749ec999bdbc875c9335632ce45f84f8e06105"} Oct 08 07:30:40 crc kubenswrapper[4693]: I1008 07:30:40.237539 4693 generic.go:334] "Generic (PLEG): container finished" podID="17985107-f4e4-4b3d-97d0-640d4d7134ea" containerID="d36d725571806fcbd7495afea21b1cc0e54dce2051ceaa04569147af3ab4640c" exitCode=0 Oct 08 07:30:40 crc kubenswrapper[4693]: I1008 07:30:40.237591 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzwl8" event={"ID":"17985107-f4e4-4b3d-97d0-640d4d7134ea","Type":"ContainerDied","Data":"d36d725571806fcbd7495afea21b1cc0e54dce2051ceaa04569147af3ab4640c"} Oct 08 07:30:41 crc kubenswrapper[4693]: I1008 07:30:41.248694 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzwl8" event={"ID":"17985107-f4e4-4b3d-97d0-640d4d7134ea","Type":"ContainerStarted","Data":"82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088"} Oct 08 07:30:41 crc kubenswrapper[4693]: I1008 07:30:41.277677 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dzwl8" podStartSLOduration=1.804730407 podStartE2EDuration="3.277656416s" podCreationTimestamp="2025-10-08 07:30:38 +0000 UTC" firstStartedPulling="2025-10-08 07:30:39.229768102 +0000 UTC m=+824.600733037" lastFinishedPulling="2025-10-08 07:30:40.702694111 +0000 UTC m=+826.073659046" observedRunningTime="2025-10-08 07:30:41.275572911 +0000 UTC m=+826.646537876" watchObservedRunningTime="2025-10-08 07:30:41.277656416 +0000 UTC 
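The pod_startup_latency_tracker fields fit together arithmetically: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration is that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling). Checking the certified-operators-dzwl8 numbers from the entry above:

    package main

    import "fmt"

    func main() {
    	// All figures in seconds, taken from the log entry.
    	e2e := 3.277656416         // podStartE2EDuration
    	pullStart := 824.600733037 // firstStartedPulling (m=+ offset)
    	pullEnd := 826.073659046   // lastFinishedPulling (m=+ offset)
    	slo := e2e - (pullEnd - pullStart)
    	fmt.Printf("%.9f\n", slo) // 1.804730407, the logged podStartSLOduration
    }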
m=+826.648621361" Oct 08 07:30:42 crc kubenswrapper[4693]: I1008 07:30:42.986080 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:43 crc kubenswrapper[4693]: I1008 07:30:43.048421 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:44 crc kubenswrapper[4693]: I1008 07:30:44.957446 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nbtjc"] Oct 08 07:30:44 crc kubenswrapper[4693]: I1008 07:30:44.957954 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nbtjc" podUID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" containerName="registry-server" containerID="cri-o://0bd4e73f38b8d37dd3b96a77aaca85f8e93bc037f77e2c3a6aaf946ff8a386f2" gracePeriod=2 Oct 08 07:30:45 crc kubenswrapper[4693]: I1008 07:30:45.309262 4693 generic.go:334] "Generic (PLEG): container finished" podID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" containerID="0bd4e73f38b8d37dd3b96a77aaca85f8e93bc037f77e2c3a6aaf946ff8a386f2" exitCode=0 Oct 08 07:30:45 crc kubenswrapper[4693]: I1008 07:30:45.309791 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbtjc" event={"ID":"d15ab2f2-caa2-4ee2-bdcb-27e69de97310","Type":"ContainerDied","Data":"0bd4e73f38b8d37dd3b96a77aaca85f8e93bc037f77e2c3a6aaf946ff8a386f2"} Oct 08 07:30:45 crc kubenswrapper[4693]: I1008 07:30:45.496079 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:45 crc kubenswrapper[4693]: I1008 07:30:45.657964 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-catalog-content\") pod \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\" (UID: \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\") " Oct 08 07:30:45 crc kubenswrapper[4693]: I1008 07:30:45.658034 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhgxb\" (UniqueName: \"kubernetes.io/projected/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-kube-api-access-rhgxb\") pod \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\" (UID: \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\") " Oct 08 07:30:45 crc kubenswrapper[4693]: I1008 07:30:45.658077 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-utilities\") pod \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\" (UID: \"d15ab2f2-caa2-4ee2-bdcb-27e69de97310\") " Oct 08 07:30:45 crc kubenswrapper[4693]: I1008 07:30:45.659180 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-utilities" (OuterVolumeSpecName: "utilities") pod "d15ab2f2-caa2-4ee2-bdcb-27e69de97310" (UID: "d15ab2f2-caa2-4ee2-bdcb-27e69de97310"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:30:45 crc kubenswrapper[4693]: I1008 07:30:45.664193 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-kube-api-access-rhgxb" (OuterVolumeSpecName: "kube-api-access-rhgxb") pod "d15ab2f2-caa2-4ee2-bdcb-27e69de97310" (UID: "d15ab2f2-caa2-4ee2-bdcb-27e69de97310"). InnerVolumeSpecName "kube-api-access-rhgxb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:30:45 crc kubenswrapper[4693]: I1008 07:30:45.745354 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d15ab2f2-caa2-4ee2-bdcb-27e69de97310" (UID: "d15ab2f2-caa2-4ee2-bdcb-27e69de97310"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:30:45 crc kubenswrapper[4693]: I1008 07:30:45.758992 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:45 crc kubenswrapper[4693]: I1008 07:30:45.759017 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:45 crc kubenswrapper[4693]: I1008 07:30:45.759027 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhgxb\" (UniqueName: \"kubernetes.io/projected/d15ab2f2-caa2-4ee2-bdcb-27e69de97310-kube-api-access-rhgxb\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:46 crc kubenswrapper[4693]: I1008 07:30:46.323374 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbtjc" event={"ID":"d15ab2f2-caa2-4ee2-bdcb-27e69de97310","Type":"ContainerDied","Data":"51862fff1c0862ef344a1ed21d8fb94ac1b5d5c0b01727517f6f364831c78604"} Oct 08 07:30:46 crc kubenswrapper[4693]: I1008 07:30:46.323733 4693 scope.go:117] "RemoveContainer" containerID="0bd4e73f38b8d37dd3b96a77aaca85f8e93bc037f77e2c3a6aaf946ff8a386f2" Oct 08 07:30:46 crc kubenswrapper[4693]: I1008 07:30:46.323448 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nbtjc" Oct 08 07:30:46 crc kubenswrapper[4693]: I1008 07:30:46.342719 4693 scope.go:117] "RemoveContainer" containerID="c0be85c40c4256608e80c653b7de6d56423bc5c39a0cee58324177bf3864a571" Oct 08 07:30:46 crc kubenswrapper[4693]: I1008 07:30:46.349093 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nbtjc"] Oct 08 07:30:46 crc kubenswrapper[4693]: I1008 07:30:46.357079 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nbtjc"] Oct 08 07:30:46 crc kubenswrapper[4693]: I1008 07:30:46.378641 4693 scope.go:117] "RemoveContainer" containerID="591cb3d92700af9e74d4c073ba331c0c3883e271213a02e410914042c882a3b9" Oct 08 07:30:47 crc kubenswrapper[4693]: I1008 07:30:47.371415 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" path="/var/lib/kubelet/pods/d15ab2f2-caa2-4ee2-bdcb-27e69de97310/volumes" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.344636 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.346975 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.407049 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.681131 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42"] Oct 08 07:30:48 crc kubenswrapper[4693]: E1008 07:30:48.681363 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" containerName="extract-utilities" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.681375 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" containerName="extract-utilities" Oct 08 07:30:48 crc kubenswrapper[4693]: E1008 07:30:48.681384 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" containerName="extract-content" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.681389 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" containerName="extract-content" Oct 08 07:30:48 crc kubenswrapper[4693]: E1008 07:30:48.681411 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" containerName="registry-server" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.681417 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" containerName="registry-server" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.681525 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="d15ab2f2-caa2-4ee2-bdcb-27e69de97310" containerName="registry-server" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.682147 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.684111 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-sdhnd" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.687582 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.688889 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.690245 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-pxsb4" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.693513 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.693626 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz7c9\" (UniqueName: \"kubernetes.io/projected/b855e40c-e0b0-4322-8099-d4e51c0b92f1-kube-api-access-bz7c9\") pod \"cinder-operator-controller-manager-7d4d4f8d-d2ztn\" (UID: \"b855e40c-e0b0-4322-8099-d4e51c0b92f1\") " pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.693756 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfhtv\" (UniqueName: \"kubernetes.io/projected/cd94a973-75b2-4722-a298-16e6bd67aa61-kube-api-access-hfhtv\") pod \"barbican-operator-controller-manager-58c4cd55f4-5tq42\" (UID: \"cd94a973-75b2-4722-a298-16e6bd67aa61\") " pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.694395 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.696721 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-dqdf6" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.717067 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.718033 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.719714 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-4dnjz" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.723947 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.729712 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.729764 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.737218 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.750926 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.751839 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.755589 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-b4lh2" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.763153 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.763994 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.771548 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.772640 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-7p26p" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.779897 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.781010 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.784729 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.784893 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-lvw9r" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.789063 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.794681 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz7c9\" (UniqueName: \"kubernetes.io/projected/b855e40c-e0b0-4322-8099-d4e51c0b92f1-kube-api-access-bz7c9\") pod \"cinder-operator-controller-manager-7d4d4f8d-d2ztn\" (UID: \"b855e40c-e0b0-4322-8099-d4e51c0b92f1\") " pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.794742 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9gr5\" (UniqueName: \"kubernetes.io/projected/2f8dab68-da73-412a-bf83-95f2ac37f289-kube-api-access-d9gr5\") pod \"designate-operator-controller-manager-75dfd9b554-dmgt7\" (UID: \"2f8dab68-da73-412a-bf83-95f2ac37f289\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.794787 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfhtv\" (UniqueName: \"kubernetes.io/projected/cd94a973-75b2-4722-a298-16e6bd67aa61-kube-api-access-hfhtv\") pod \"barbican-operator-controller-manager-58c4cd55f4-5tq42\" (UID: \"cd94a973-75b2-4722-a298-16e6bd67aa61\") " pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.794842 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w9kd\" (UniqueName: \"kubernetes.io/projected/6fc858ec-6edd-4e45-ba44-fe2ea26a0614-kube-api-access-9w9kd\") pod \"infra-operator-controller-manager-658588b8c9-6vt47\" (UID: \"6fc858ec-6edd-4e45-ba44-fe2ea26a0614\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.794874 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh9mn\" (UniqueName: \"kubernetes.io/projected/72f3f2ae-ba07-4045-9ac4-fc4f0dee2682-kube-api-access-nh9mn\") pod \"glance-operator-controller-manager-5dc44df7d5-hbqzd\" (UID: \"72f3f2ae-ba07-4045-9ac4-fc4f0dee2682\") " pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.794900 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6fc858ec-6edd-4e45-ba44-fe2ea26a0614-cert\") pod \"infra-operator-controller-manager-658588b8c9-6vt47\" (UID: \"6fc858ec-6edd-4e45-ba44-fe2ea26a0614\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.794935 
4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvcjs\" (UniqueName: \"kubernetes.io/projected/bfc23a1a-faab-44e8-91f7-29d4e95f0fdc-kube-api-access-qvcjs\") pod \"horizon-operator-controller-manager-76d5b87f47-njkdz\" (UID: \"bfc23a1a-faab-44e8-91f7-29d4e95f0fdc\") " pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.794963 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9kdz\" (UniqueName: \"kubernetes.io/projected/81212063-ccc7-423c-b817-60f7280ee4f9-kube-api-access-z9kdz\") pod \"heat-operator-controller-manager-54b4974c45-h25pk\" (UID: \"81212063-ccc7-423c-b817-60f7280ee4f9\") " pod="openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.815153 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-649675d675-w96xx"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.816251 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.819122 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-jtc7n" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.820226 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz7c9\" (UniqueName: \"kubernetes.io/projected/b855e40c-e0b0-4322-8099-d4e51c0b92f1-kube-api-access-bz7c9\") pod \"cinder-operator-controller-manager-7d4d4f8d-d2ztn\" (UID: \"b855e40c-e0b0-4322-8099-d4e51c0b92f1\") " pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.820838 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-649675d675-w96xx"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.835419 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.836419 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.836417 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfhtv\" (UniqueName: \"kubernetes.io/projected/cd94a973-75b2-4722-a298-16e6bd67aa61-kube-api-access-hfhtv\") pod \"barbican-operator-controller-manager-58c4cd55f4-5tq42\" (UID: \"cd94a973-75b2-4722-a298-16e6bd67aa61\") " pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.840627 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-hkvd4" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.873678 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.897239 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9gr5\" (UniqueName: \"kubernetes.io/projected/2f8dab68-da73-412a-bf83-95f2ac37f289-kube-api-access-d9gr5\") pod \"designate-operator-controller-manager-75dfd9b554-dmgt7\" (UID: \"2f8dab68-da73-412a-bf83-95f2ac37f289\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.899213 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w9kd\" (UniqueName: \"kubernetes.io/projected/6fc858ec-6edd-4e45-ba44-fe2ea26a0614-kube-api-access-9w9kd\") pod \"infra-operator-controller-manager-658588b8c9-6vt47\" (UID: \"6fc858ec-6edd-4e45-ba44-fe2ea26a0614\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.899345 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh9mn\" (UniqueName: \"kubernetes.io/projected/72f3f2ae-ba07-4045-9ac4-fc4f0dee2682-kube-api-access-nh9mn\") pod \"glance-operator-controller-manager-5dc44df7d5-hbqzd\" (UID: \"72f3f2ae-ba07-4045-9ac4-fc4f0dee2682\") " pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.899554 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6fc858ec-6edd-4e45-ba44-fe2ea26a0614-cert\") pod \"infra-operator-controller-manager-658588b8c9-6vt47\" (UID: \"6fc858ec-6edd-4e45-ba44-fe2ea26a0614\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.899663 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvcjs\" (UniqueName: \"kubernetes.io/projected/bfc23a1a-faab-44e8-91f7-29d4e95f0fdc-kube-api-access-qvcjs\") pod \"horizon-operator-controller-manager-76d5b87f47-njkdz\" (UID: \"bfc23a1a-faab-44e8-91f7-29d4e95f0fdc\") " pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.899702 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9kdz\" (UniqueName: \"kubernetes.io/projected/81212063-ccc7-423c-b817-60f7280ee4f9-kube-api-access-z9kdz\") pod \"heat-operator-controller-manager-54b4974c45-h25pk\" 
(UID: \"81212063-ccc7-423c-b817-60f7280ee4f9\") " pod="openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.902407 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x"] Oct 08 07:30:48 crc kubenswrapper[4693]: E1008 07:30:48.903080 4693 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 08 07:30:48 crc kubenswrapper[4693]: E1008 07:30:48.903140 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6fc858ec-6edd-4e45-ba44-fe2ea26a0614-cert podName:6fc858ec-6edd-4e45-ba44-fe2ea26a0614 nodeName:}" failed. No retries permitted until 2025-10-08 07:30:49.403118261 +0000 UTC m=+834.774083196 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6fc858ec-6edd-4e45-ba44-fe2ea26a0614-cert") pod "infra-operator-controller-manager-658588b8c9-6vt47" (UID: "6fc858ec-6edd-4e45-ba44-fe2ea26a0614") : secret "infra-operator-webhook-server-cert" not found Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.931250 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9kdz\" (UniqueName: \"kubernetes.io/projected/81212063-ccc7-423c-b817-60f7280ee4f9-kube-api-access-z9kdz\") pod \"heat-operator-controller-manager-54b4974c45-h25pk\" (UID: \"81212063-ccc7-423c-b817-60f7280ee4f9\") " pod="openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.947701 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w9kd\" (UniqueName: \"kubernetes.io/projected/6fc858ec-6edd-4e45-ba44-fe2ea26a0614-kube-api-access-9w9kd\") pod \"infra-operator-controller-manager-658588b8c9-6vt47\" (UID: \"6fc858ec-6edd-4e45-ba44-fe2ea26a0614\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.948101 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh9mn\" (UniqueName: \"kubernetes.io/projected/72f3f2ae-ba07-4045-9ac4-fc4f0dee2682-kube-api-access-nh9mn\") pod \"glance-operator-controller-manager-5dc44df7d5-hbqzd\" (UID: \"72f3f2ae-ba07-4045-9ac4-fc4f0dee2682\") " pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.948190 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9gr5\" (UniqueName: \"kubernetes.io/projected/2f8dab68-da73-412a-bf83-95f2ac37f289-kube-api-access-d9gr5\") pod \"designate-operator-controller-manager-75dfd9b554-dmgt7\" (UID: \"2f8dab68-da73-412a-bf83-95f2ac37f289\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.948520 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvcjs\" (UniqueName: \"kubernetes.io/projected/bfc23a1a-faab-44e8-91f7-29d4e95f0fdc-kube-api-access-qvcjs\") pod \"horizon-operator-controller-manager-76d5b87f47-njkdz\" (UID: \"bfc23a1a-faab-44e8-91f7-29d4e95f0fdc\") " pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.961876 4693 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.962899 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.965895 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-r7kjf" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.974883 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.975834 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.988872 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-7ccc6" Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.996922 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk"] Oct 08 07:30:48 crc kubenswrapper[4693]: I1008 07:30:48.997416 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.007825 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.011318 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.016722 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.017667 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.021516 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlvxt\" (UniqueName: \"kubernetes.io/projected/d96bb98a-f416-4d93-b145-37632210f2f8-kube-api-access-wlvxt\") pod \"ironic-operator-controller-manager-649675d675-w96xx\" (UID: \"d96bb98a-f416-4d93-b145-37632210f2f8\") " pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.021556 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhcvq\" (UniqueName: \"kubernetes.io/projected/d26d490a-dba0-46d4-b636-836a4dde53be-kube-api-access-xhcvq\") pod \"keystone-operator-controller-manager-7b5ccf6d9c-vqp2x\" (UID: \"d26d490a-dba0-46d4-b636-836a4dde53be\") " pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.024099 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-jbv4q" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.029988 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.031738 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.034234 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.037443 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.038356 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.043728 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-2qmtc" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.044118 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-km7k7" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.045863 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.058627 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.068911 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.070321 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.071559 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.075041 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-tkw4n" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.079824 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.080944 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.084612 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.085476 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-h44p8" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.087943 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.089144 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.093836 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-f8hqc" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.098110 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.102136 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.110731 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.115479 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.120176 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.122866 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlvxt\" (UniqueName: \"kubernetes.io/projected/d96bb98a-f416-4d93-b145-37632210f2f8-kube-api-access-wlvxt\") pod \"ironic-operator-controller-manager-649675d675-w96xx\" (UID: \"d96bb98a-f416-4d93-b145-37632210f2f8\") " pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.122904 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhcvq\" (UniqueName: \"kubernetes.io/projected/d26d490a-dba0-46d4-b636-836a4dde53be-kube-api-access-xhcvq\") pod \"keystone-operator-controller-manager-7b5ccf6d9c-vqp2x\" (UID: \"d26d490a-dba0-46d4-b636-836a4dde53be\") " pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.122929 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxw7c\" (UniqueName: \"kubernetes.io/projected/3fedcb35-9741-40ee-bdb0-a1d78a5da3e6-kube-api-access-kxw7c\") pod \"neutron-operator-controller-manager-8d984cc4d-567tm\" (UID: \"3fedcb35-9741-40ee-bdb0-a1d78a5da3e6\") " pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.122946 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pllsj\" (UniqueName: \"kubernetes.io/projected/6772aabf-b5fa-4fc7-8925-0926ed242e9b-kube-api-access-pllsj\") pod \"mariadb-operator-controller-manager-6cd6d7bdf5-65sjq\" (UID: \"6772aabf-b5fa-4fc7-8925-0926ed242e9b\") " pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.122986 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scwrb\" (UniqueName: \"kubernetes.io/projected/b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f-kube-api-access-scwrb\") pod \"manila-operator-controller-manager-65d89cfd9f-blmhk\" (UID: \"b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f\") " pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.134855 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.137625 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.138916 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.139413 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhcvq\" (UniqueName: \"kubernetes.io/projected/d26d490a-dba0-46d4-b636-836a4dde53be-kube-api-access-xhcvq\") pod \"keystone-operator-controller-manager-7b5ccf6d9c-vqp2x\" (UID: \"d26d490a-dba0-46d4-b636-836a4dde53be\") " pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.141191 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlvxt\" (UniqueName: \"kubernetes.io/projected/d96bb98a-f416-4d93-b145-37632210f2f8-kube-api-access-wlvxt\") pod \"ironic-operator-controller-manager-649675d675-w96xx\" (UID: \"d96bb98a-f416-4d93-b145-37632210f2f8\") " pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.142190 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-qrpq2" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.146182 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.158912 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.168475 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.168591 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.173354 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-d9htk" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.195002 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.205210 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.224291 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a45ca91b-ddca-4c17-ab8b-d106345451d3-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w\" (UID: \"a45ca91b-ddca-4c17-ab8b-d106345451d3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.224344 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7sqc\" (UniqueName: \"kubernetes.io/projected/a45ca91b-ddca-4c17-ab8b-d106345451d3-kube-api-access-r7sqc\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w\" (UID: \"a45ca91b-ddca-4c17-ab8b-d106345451d3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.224379 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5dtj\" (UniqueName: \"kubernetes.io/projected/422c537a-d341-45ac-ac02-3fb221b66ed4-kube-api-access-t5dtj\") pod \"octavia-operator-controller-manager-7468f855d8-4zvpz\" (UID: \"422c537a-d341-45ac-ac02-3fb221b66ed4\") " pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.224429 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8gq9\" (UniqueName: \"kubernetes.io/projected/a8912110-fa72-4e6b-9c38-7b62b34772fa-kube-api-access-s8gq9\") pod \"ovn-operator-controller-manager-6d8b6f9b9-jtx9z\" (UID: \"a8912110-fa72-4e6b-9c38-7b62b34772fa\") " pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.224454 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22f2m\" (UniqueName: \"kubernetes.io/projected/9d40d3d9-711e-461b-b859-684b1af38ee9-kube-api-access-22f2m\") pod \"placement-operator-controller-manager-54689d9f88-h2npd\" (UID: \"9d40d3d9-711e-461b-b859-684b1af38ee9\") " pod="openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.224492 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxw7c\" (UniqueName: \"kubernetes.io/projected/3fedcb35-9741-40ee-bdb0-a1d78a5da3e6-kube-api-access-kxw7c\") pod \"neutron-operator-controller-manager-8d984cc4d-567tm\" (UID: \"3fedcb35-9741-40ee-bdb0-a1d78a5da3e6\") " pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.224513 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pllsj\" (UniqueName: \"kubernetes.io/projected/6772aabf-b5fa-4fc7-8925-0926ed242e9b-kube-api-access-pllsj\") pod \"mariadb-operator-controller-manager-6cd6d7bdf5-65sjq\" (UID: \"6772aabf-b5fa-4fc7-8925-0926ed242e9b\") " pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.224570 4693 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-scwrb\" (UniqueName: \"kubernetes.io/projected/b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f-kube-api-access-scwrb\") pod \"manila-operator-controller-manager-65d89cfd9f-blmhk\" (UID: \"b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f\") " pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.224597 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dmhw\" (UniqueName: \"kubernetes.io/projected/cba353e7-9050-4433-a6b6-2ca4f67d077a-kube-api-access-8dmhw\") pod \"nova-operator-controller-manager-7c7fc454ff-lhs5r\" (UID: \"cba353e7-9050-4433-a6b6-2ca4f67d077a\") " pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.227995 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.230908 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.236268 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.242859 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-wz5md" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.253160 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scwrb\" (UniqueName: \"kubernetes.io/projected/b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f-kube-api-access-scwrb\") pod \"manila-operator-controller-manager-65d89cfd9f-blmhk\" (UID: \"b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f\") " pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.264643 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxw7c\" (UniqueName: \"kubernetes.io/projected/3fedcb35-9741-40ee-bdb0-a1d78a5da3e6-kube-api-access-kxw7c\") pod \"neutron-operator-controller-manager-8d984cc4d-567tm\" (UID: \"3fedcb35-9741-40ee-bdb0-a1d78a5da3e6\") " pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.302716 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.314290 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pllsj\" (UniqueName: \"kubernetes.io/projected/6772aabf-b5fa-4fc7-8925-0926ed242e9b-kube-api-access-pllsj\") pod \"mariadb-operator-controller-manager-6cd6d7bdf5-65sjq\" (UID: \"6772aabf-b5fa-4fc7-8925-0926ed242e9b\") " pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.316875 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.318029 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.322149 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.325523 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.328073 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-xq7b8" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.329398 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dmhw\" (UniqueName: \"kubernetes.io/projected/cba353e7-9050-4433-a6b6-2ca4f67d077a-kube-api-access-8dmhw\") pod \"nova-operator-controller-manager-7c7fc454ff-lhs5r\" (UID: \"cba353e7-9050-4433-a6b6-2ca4f67d077a\") " pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.329586 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85dst\" (UniqueName: \"kubernetes.io/projected/d3a3ae96-9b43-42ab-b688-95e141f326f4-kube-api-access-85dst\") pod \"telemetry-operator-controller-manager-5d4d74dd89-6h28f\" (UID: \"d3a3ae96-9b43-42ab-b688-95e141f326f4\") " pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.329620 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7hln\" (UniqueName: \"kubernetes.io/projected/f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68-kube-api-access-p7hln\") pod \"test-operator-controller-manager-5cd5cb47d7-9bz2k\" (UID: \"f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.329648 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a45ca91b-ddca-4c17-ab8b-d106345451d3-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w\" (UID: \"a45ca91b-ddca-4c17-ab8b-d106345451d3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.329665 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7sqc\" (UniqueName: \"kubernetes.io/projected/a45ca91b-ddca-4c17-ab8b-d106345451d3-kube-api-access-r7sqc\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w\" (UID: \"a45ca91b-ddca-4c17-ab8b-d106345451d3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.329687 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5dtj\" (UniqueName: \"kubernetes.io/projected/422c537a-d341-45ac-ac02-3fb221b66ed4-kube-api-access-t5dtj\") pod \"octavia-operator-controller-manager-7468f855d8-4zvpz\" (UID: \"422c537a-d341-45ac-ac02-3fb221b66ed4\") " pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz" Oct 08 07:30:49 crc 
kubenswrapper[4693]: I1008 07:30:49.329705 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hclh\" (UniqueName: \"kubernetes.io/projected/78a7e32f-67dc-454f-b65c-8a8a2605d139-kube-api-access-4hclh\") pod \"swift-operator-controller-manager-6859f9b676-7sw5d\" (UID: \"78a7e32f-67dc-454f-b65c-8a8a2605d139\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.329740 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8gq9\" (UniqueName: \"kubernetes.io/projected/a8912110-fa72-4e6b-9c38-7b62b34772fa-kube-api-access-s8gq9\") pod \"ovn-operator-controller-manager-6d8b6f9b9-jtx9z\" (UID: \"a8912110-fa72-4e6b-9c38-7b62b34772fa\") " pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.329760 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22f2m\" (UniqueName: \"kubernetes.io/projected/9d40d3d9-711e-461b-b859-684b1af38ee9-kube-api-access-22f2m\") pod \"placement-operator-controller-manager-54689d9f88-h2npd\" (UID: \"9d40d3d9-711e-461b-b859-684b1af38ee9\") " pod="openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd" Oct 08 07:30:49 crc kubenswrapper[4693]: E1008 07:30:49.329912 4693 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 08 07:30:49 crc kubenswrapper[4693]: E1008 07:30:49.329973 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a45ca91b-ddca-4c17-ab8b-d106345451d3-cert podName:a45ca91b-ddca-4c17-ab8b-d106345451d3 nodeName:}" failed. No retries permitted until 2025-10-08 07:30:49.829953516 +0000 UTC m=+835.200918451 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a45ca91b-ddca-4c17-ab8b-d106345451d3-cert") pod "openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" (UID: "a45ca91b-ddca-4c17-ab8b-d106345451d3") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.349535 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8gq9\" (UniqueName: \"kubernetes.io/projected/a8912110-fa72-4e6b-9c38-7b62b34772fa-kube-api-access-s8gq9\") pod \"ovn-operator-controller-manager-6d8b6f9b9-jtx9z\" (UID: \"a8912110-fa72-4e6b-9c38-7b62b34772fa\") " pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.352565 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7sqc\" (UniqueName: \"kubernetes.io/projected/a45ca91b-ddca-4c17-ab8b-d106345451d3-kube-api-access-r7sqc\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w\" (UID: \"a45ca91b-ddca-4c17-ab8b-d106345451d3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.354348 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22f2m\" (UniqueName: \"kubernetes.io/projected/9d40d3d9-711e-461b-b859-684b1af38ee9-kube-api-access-22f2m\") pod \"placement-operator-controller-manager-54689d9f88-h2npd\" (UID: \"9d40d3d9-711e-461b-b859-684b1af38ee9\") " pod="openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.355673 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dmhw\" (UniqueName: \"kubernetes.io/projected/cba353e7-9050-4433-a6b6-2ca4f67d077a-kube-api-access-8dmhw\") pod \"nova-operator-controller-manager-7c7fc454ff-lhs5r\" (UID: \"cba353e7-9050-4433-a6b6-2ca4f67d077a\") " pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.361667 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5dtj\" (UniqueName: \"kubernetes.io/projected/422c537a-d341-45ac-ac02-3fb221b66ed4-kube-api-access-t5dtj\") pod \"octavia-operator-controller-manager-7468f855d8-4zvpz\" (UID: \"422c537a-d341-45ac-ac02-3fb221b66ed4\") " pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.367390 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.387569 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.411158 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.422503 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.423876 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.430239 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-rrlrw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.430419 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.430551 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nf2nd\" (UniqueName: \"kubernetes.io/projected/09c1a297-4a54-430a-a78e-134db76611b9-kube-api-access-nf2nd\") pod \"watcher-operator-controller-manager-6cbc6dd547-h74m5\" (UID: \"09c1a297-4a54-430a-a78e-134db76611b9\") " pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.430628 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85dst\" (UniqueName: \"kubernetes.io/projected/d3a3ae96-9b43-42ab-b688-95e141f326f4-kube-api-access-85dst\") pod \"telemetry-operator-controller-manager-5d4d74dd89-6h28f\" (UID: \"d3a3ae96-9b43-42ab-b688-95e141f326f4\") " pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.430649 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7hln\" (UniqueName: \"kubernetes.io/projected/f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68-kube-api-access-p7hln\") pod \"test-operator-controller-manager-5cd5cb47d7-9bz2k\" (UID: \"f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.430694 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hclh\" (UniqueName: \"kubernetes.io/projected/78a7e32f-67dc-454f-b65c-8a8a2605d139-kube-api-access-4hclh\") pod \"swift-operator-controller-manager-6859f9b676-7sw5d\" (UID: \"78a7e32f-67dc-454f-b65c-8a8a2605d139\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.430718 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6fc858ec-6edd-4e45-ba44-fe2ea26a0614-cert\") pod \"infra-operator-controller-manager-658588b8c9-6vt47\" (UID: \"6fc858ec-6edd-4e45-ba44-fe2ea26a0614\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.433927 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.441443 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.452364 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85dst\" (UniqueName: \"kubernetes.io/projected/d3a3ae96-9b43-42ab-b688-95e141f326f4-kube-api-access-85dst\") pod \"telemetry-operator-controller-manager-5d4d74dd89-6h28f\" (UID: \"d3a3ae96-9b43-42ab-b688-95e141f326f4\") " pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.455318 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.455502 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7hln\" (UniqueName: \"kubernetes.io/projected/f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68-kube-api-access-p7hln\") pod \"test-operator-controller-manager-5cd5cb47d7-9bz2k\" (UID: \"f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.455802 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6fc858ec-6edd-4e45-ba44-fe2ea26a0614-cert\") pod \"infra-operator-controller-manager-658588b8c9-6vt47\" (UID: \"6fc858ec-6edd-4e45-ba44-fe2ea26a0614\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.461450 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.472392 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.479634 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hclh\" (UniqueName: \"kubernetes.io/projected/78a7e32f-67dc-454f-b65c-8a8a2605d139-kube-api-access-4hclh\") pod \"swift-operator-controller-manager-6859f9b676-7sw5d\" (UID: \"78a7e32f-67dc-454f-b65c-8a8a2605d139\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.479790 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.480727 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.483869 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.487018 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.489032 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-2plrq" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.505436 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.534412 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcdtp\" (UniqueName: \"kubernetes.io/projected/5bfb052c-4d4a-47df-bb42-25424b56cb92-kube-api-access-xcdtp\") pod \"openstack-operator-controller-manager-7f66b9c549-m8hmw\" (UID: \"5bfb052c-4d4a-47df-bb42-25424b56cb92\") " pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.534460 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nf2nd\" (UniqueName: \"kubernetes.io/projected/09c1a297-4a54-430a-a78e-134db76611b9-kube-api-access-nf2nd\") pod \"watcher-operator-controller-manager-6cbc6dd547-h74m5\" (UID: \"09c1a297-4a54-430a-a78e-134db76611b9\") " pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.534567 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5bfb052c-4d4a-47df-bb42-25424b56cb92-cert\") pod \"openstack-operator-controller-manager-7f66b9c549-m8hmw\" (UID: \"5bfb052c-4d4a-47df-bb42-25424b56cb92\") " pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.552493 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nf2nd\" (UniqueName: \"kubernetes.io/projected/09c1a297-4a54-430a-a78e-134db76611b9-kube-api-access-nf2nd\") pod \"watcher-operator-controller-manager-6cbc6dd547-h74m5\" (UID: \"09c1a297-4a54-430a-a78e-134db76611b9\") " pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.598361 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn"] Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.598758 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" Oct 08 07:30:49 crc kubenswrapper[4693]: W1008 07:30:49.606475 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb855e40c_e0b0_4322_8099_d4e51c0b92f1.slice/crio-0bdf2b89e52137d2ee3bafff5a037823f35f3802e888be41ea2db86d3485fbbd WatchSource:0}: Error finding container 0bdf2b89e52137d2ee3bafff5a037823f35f3802e888be41ea2db86d3485fbbd: Status 404 returned error can't find the container with id 0bdf2b89e52137d2ee3bafff5a037823f35f3802e888be41ea2db86d3485fbbd Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.635892 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5bfb052c-4d4a-47df-bb42-25424b56cb92-cert\") pod \"openstack-operator-controller-manager-7f66b9c549-m8hmw\" (UID: \"5bfb052c-4d4a-47df-bb42-25424b56cb92\") " pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.635994 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcdtp\" (UniqueName: \"kubernetes.io/projected/5bfb052c-4d4a-47df-bb42-25424b56cb92-kube-api-access-xcdtp\") pod \"openstack-operator-controller-manager-7f66b9c549-m8hmw\" (UID: \"5bfb052c-4d4a-47df-bb42-25424b56cb92\") " pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.636028 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc9kc\" (UniqueName: \"kubernetes.io/projected/b2a1cce0-35c1-46ed-b375-bb70c8a7c15f-kube-api-access-rc9kc\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw\" (UID: \"b2a1cce0-35c1-46ed-b375-bb70c8a7c15f\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.649396 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5bfb052c-4d4a-47df-bb42-25424b56cb92-cert\") pod \"openstack-operator-controller-manager-7f66b9c549-m8hmw\" (UID: \"5bfb052c-4d4a-47df-bb42-25424b56cb92\") " pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.651498 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcdtp\" (UniqueName: \"kubernetes.io/projected/5bfb052c-4d4a-47df-bb42-25424b56cb92-kube-api-access-xcdtp\") pod \"openstack-operator-controller-manager-7f66b9c549-m8hmw\" (UID: \"5bfb052c-4d4a-47df-bb42-25424b56cb92\") " pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.656756 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.738929 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc9kc\" (UniqueName: \"kubernetes.io/projected/b2a1cce0-35c1-46ed-b375-bb70c8a7c15f-kube-api-access-rc9kc\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw\" (UID: \"b2a1cce0-35c1-46ed-b375-bb70c8a7c15f\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.760464 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc9kc\" (UniqueName: \"kubernetes.io/projected/b2a1cce0-35c1-46ed-b375-bb70c8a7c15f-kube-api-access-rc9kc\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw\" (UID: \"b2a1cce0-35c1-46ed-b375-bb70c8a7c15f\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.829291 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.839877 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a45ca91b-ddca-4c17-ab8b-d106345451d3-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w\" (UID: \"a45ca91b-ddca-4c17-ab8b-d106345451d3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" Oct 08 07:30:49 crc kubenswrapper[4693]: E1008 07:30:49.840078 4693 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 08 07:30:49 crc kubenswrapper[4693]: E1008 07:30:49.840165 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a45ca91b-ddca-4c17-ab8b-d106345451d3-cert podName:a45ca91b-ddca-4c17-ab8b-d106345451d3 nodeName:}" failed. No retries permitted until 2025-10-08 07:30:50.840146331 +0000 UTC m=+836.211111266 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a45ca91b-ddca-4c17-ab8b-d106345451d3-cert") pod "openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" (UID: "a45ca91b-ddca-4c17-ab8b-d106345451d3") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.856141 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw" Oct 08 07:30:49 crc kubenswrapper[4693]: I1008 07:30:49.934488 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42"] Oct 08 07:30:49 crc kubenswrapper[4693]: W1008 07:30:49.950459 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd94a973_75b2_4722_a298_16e6bd67aa61.slice/crio-676eee64404fb3d7c4d2700578229b8626c837880ddecc78359b8e1723d1693c WatchSource:0}: Error finding container 676eee64404fb3d7c4d2700578229b8626c837880ddecc78359b8e1723d1693c: Status 404 returned error can't find the container with id 676eee64404fb3d7c4d2700578229b8626c837880ddecc78359b8e1723d1693c Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.222970 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.230700 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.235493 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.251462 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz"] Oct 08 07:30:50 crc kubenswrapper[4693]: W1008 07:30:50.255386 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbfc23a1a_faab_44e8_91f7_29d4e95f0fdc.slice/crio-0d163e249eb984f1152f1b90e605875f783670671ec9c283159333906fb3e6a8 WatchSource:0}: Error finding container 0d163e249eb984f1152f1b90e605875f783670671ec9c283159333906fb3e6a8: Status 404 returned error can't find the container with id 0d163e249eb984f1152f1b90e605875f783670671ec9c283159333906fb3e6a8 Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.355158 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz" event={"ID":"bfc23a1a-faab-44e8-91f7-29d4e95f0fdc","Type":"ContainerStarted","Data":"0d163e249eb984f1152f1b90e605875f783670671ec9c283159333906fb3e6a8"} Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.356074 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7" event={"ID":"2f8dab68-da73-412a-bf83-95f2ac37f289","Type":"ContainerStarted","Data":"9ee671129507f3a4261dc6272ee07069fefe5b63a2f986b32c7270f11d4d8e2a"} Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.357204 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x" event={"ID":"d26d490a-dba0-46d4-b636-836a4dde53be","Type":"ContainerStarted","Data":"f1ef567fe26f813616eb9044a88e33a5c8c90d3f6dd535aaf6269c38918f6e49"} Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.358041 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn" 
event={"ID":"b855e40c-e0b0-4322-8099-d4e51c0b92f1","Type":"ContainerStarted","Data":"0bdf2b89e52137d2ee3bafff5a037823f35f3802e888be41ea2db86d3485fbbd"} Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.358902 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk" event={"ID":"81212063-ccc7-423c-b817-60f7280ee4f9","Type":"ContainerStarted","Data":"e9bd456886291f2ca4b72913d3522154bcac4ed596956f8c6c5a92a3641bc21a"} Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.360155 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42" event={"ID":"cd94a973-75b2-4722-a298-16e6bd67aa61","Type":"ContainerStarted","Data":"676eee64404fb3d7c4d2700578229b8626c837880ddecc78359b8e1723d1693c"} Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.560958 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dzwl8"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.603339 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.633394 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.652416 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.662902 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.666993 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.671489 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.687124 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.692462 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.698792 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk"] Oct 08 07:30:50 crc kubenswrapper[4693]: E1008 07:30:50.702450 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:785670b14b19ffd7e0799dcf3e3e275329fa822d4a604eace09574f8bb1f8162,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 
0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wlvxt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-649675d675-w96xx_openstack-operators(d96bb98a-f416-4d93-b145-37632210f2f8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.703391 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-649675d675-w96xx"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.707629 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k"] Oct 08 07:30:50 crc kubenswrapper[4693]: E1008 07:30:50.716202 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-85dst,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5d4d74dd89-6h28f_openstack-operators(d3a3ae96-9b43-42ab-b688-95e141f326f4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 08 07:30:50 crc kubenswrapper[4693]: E1008 07:30:50.716599 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rc9kc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw_openstack-operators(b2a1cce0-35c1-46ed-b375-bb70c8a7c15f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.721395 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw"] Oct 08 07:30:50 crc kubenswrapper[4693]: E1008 07:30:50.722196 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw" podUID="b2a1cce0-35c1-46ed-b375-bb70c8a7c15f" Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.726724 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm"] Oct 08 07:30:50 crc kubenswrapper[4693]: E1008 07:30:50.726764 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p7hln,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5cd5cb47d7-9bz2k_openstack-operators(f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 08 07:30:50 crc kubenswrapper[4693]: E1008 07:30:50.727549 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9w9kd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-658588b8c9-6vt47_openstack-operators(6fc858ec-6edd-4e45-ba44-fe2ea26a0614): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 08 07:30:50 crc kubenswrapper[4693]: E1008 07:30:50.727694 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:dfd044635f9df9ed1d249387fa622177db35cdc72475e1c570617b8d17c64862,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kxw7c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-8d984cc4d-567tm_openstack-operators(3fedcb35-9741-40ee-bdb0-a1d78a5da3e6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.732238 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5"] Oct 08 07:30:50 crc kubenswrapper[4693]: E1008 07:30:50.735854 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:64f57b2b59dea2bd9fae91490c5bec2687131884a049e6579819d9f951b877c6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nf2nd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-6cbc6dd547-h74m5_openstack-operators(09c1a297-4a54-430a-a78e-134db76611b9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.741287 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.753579 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw"] Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.853501 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a45ca91b-ddca-4c17-ab8b-d106345451d3-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w\" (UID: \"a45ca91b-ddca-4c17-ab8b-d106345451d3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.859279 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a45ca91b-ddca-4c17-ab8b-d106345451d3-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w\" (UID: \"a45ca91b-ddca-4c17-ab8b-d106345451d3\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" Oct 08 07:30:50 crc kubenswrapper[4693]: E1008 07:30:50.891158 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" podUID="f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68" Oct 08 07:30:50 crc kubenswrapper[4693]: E1008 07:30:50.893592 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" podUID="d96bb98a-f416-4d93-b145-37632210f2f8" Oct 08 07:30:50 crc kubenswrapper[4693]: I1008 07:30:50.923618 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" Oct 08 07:30:51 crc kubenswrapper[4693]: E1008 07:30:51.110717 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" podUID="3fedcb35-9741-40ee-bdb0-a1d78a5da3e6" Oct 08 07:30:51 crc kubenswrapper[4693]: E1008 07:30:51.122415 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" podUID="d3a3ae96-9b43-42ab-b688-95e141f326f4" Oct 08 07:30:51 crc kubenswrapper[4693]: E1008 07:30:51.124355 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" podUID="6fc858ec-6edd-4e45-ba44-fe2ea26a0614" Oct 08 07:30:51 crc kubenswrapper[4693]: E1008 07:30:51.127903 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" podUID="09c1a297-4a54-430a-a78e-134db76611b9" Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.352523 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w"] Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.405738 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" event={"ID":"3fedcb35-9741-40ee-bdb0-a1d78a5da3e6","Type":"ContainerStarted","Data":"6d612eb878728dc00baac35a109d10bf8c4af01526860925a81e2a1acb891424"} Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.405764 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" event={"ID":"3fedcb35-9741-40ee-bdb0-a1d78a5da3e6","Type":"ContainerStarted","Data":"884acb0ceac28d0a2aad879c5e774f37250d1bf0ab54fba07cec61dab78134de"} Oct 08 07:30:51 crc kubenswrapper[4693]: E1008 07:30:51.406940 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:dfd044635f9df9ed1d249387fa622177db35cdc72475e1c570617b8d17c64862\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" podUID="3fedcb35-9741-40ee-bdb0-a1d78a5da3e6" Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.423086 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" event={"ID":"09c1a297-4a54-430a-a78e-134db76611b9","Type":"ContainerStarted","Data":"fb0f4520c5cdc500b1bff44aa4dcbfb1635b22ac8d513e84b03a137f67729a56"} Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.423131 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" event={"ID":"09c1a297-4a54-430a-a78e-134db76611b9","Type":"ContainerStarted","Data":"7f15f7749cd6fec7f73473121e50d40ada0a8de2bd1e960c01b0a04df5796ff2"} Oct 08 07:30:51 crc 
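Once a pull has failed, the error for that image flips from ErrImagePull to ImagePullBackOff on subsequent syncs: the kubelet keeps a per-image back-off window and refuses to retry until it expires, with the window roughly doubling on each failure up to a cap. A sketch under the assumption that the window behaves like client-go's flowcontrol.Backoff with a 10s initial / 5m cap (the commonly cited kubelet defaults); not the kubelet's own helper:

```go
// Minimal sketch of the per-image back-off behind ImagePullBackOff,
// using client-go's flowcontrol.Backoff. 10s initial / 5m cap are
// assumed kubelet-style defaults; the key would be the image ref.
package main

import (
	"fmt"
	"time"

	"k8s.io/client-go/util/flowcontrol"
)

func main() {
	backoff := flowcontrol.NewBackOff(10*time.Second, 300*time.Second)
	img := "quay.io/openstack-k8s-operators/example@sha256:abc" // hypothetical ref

	now := time.Now()
	for attempt := 1; attempt <= 5; attempt++ {
		if backoff.IsInBackOffSinceUpdate(img, now) {
			// kubelet surfaces this state as: Back-off pulling image "<ref>"
			fmt.Printf("attempt %d: ImagePullBackOff (window %v)\n", attempt, backoff.Get(img))
		} else {
			fmt.Printf("attempt %d: retry allowed (pull fails again)\n", attempt)
			backoff.Next(img, now) // record the failure; doubles the window, capped
		}
		now = now.Add(backoff.Get(img) / 2) // advance simulated time for the demo
	}
}
```

This is why the entries below alternate between ContainerStarted events for the pod sandboxes and "Back-off pulling image" errors for the manager containers: the sandboxes run fine, but each manager image stays in its back-off window until the next allowed retry.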
Oct 08 07:30:51 crc kubenswrapper[4693]: E1008 07:30:51.424906 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:64f57b2b59dea2bd9fae91490c5bec2687131884a049e6579819d9f951b877c6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" podUID="09c1a297-4a54-430a-a78e-134db76611b9"
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.425944 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk" event={"ID":"b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f","Type":"ContainerStarted","Data":"f9c4d3996c8822fc62ac419bbf33cd55dc48b78aafda51eb26497505c9843152"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.448921 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" event={"ID":"6fc858ec-6edd-4e45-ba44-fe2ea26a0614","Type":"ContainerStarted","Data":"a74a6f67e49faa73996061ddcde9391e7670265ec36fb127efd852d9a63945b1"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.448962 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" event={"ID":"6fc858ec-6edd-4e45-ba44-fe2ea26a0614","Type":"ContainerStarted","Data":"34c7e4fa6ea204fc74f86663145880d9bfc76399ab2069bdd8e28a3d429f2254"}
Oct 08 07:30:51 crc kubenswrapper[4693]: E1008 07:30:51.450518 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" podUID="6fc858ec-6edd-4e45-ba44-fe2ea26a0614"
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.457867 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z" event={"ID":"a8912110-fa72-4e6b-9c38-7b62b34772fa","Type":"ContainerStarted","Data":"ae51d14db5aa48cee72da4d62dc2145d8584e308281f26381c6b0503faff6bbd"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.459059 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r" event={"ID":"cba353e7-9050-4433-a6b6-2ca4f67d077a","Type":"ContainerStarted","Data":"e2d96ce5f41726c08a5b1f161481a3b520dffa899448570799e717d726bfcf2d"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.459736 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd" event={"ID":"72f3f2ae-ba07-4045-9ac4-fc4f0dee2682","Type":"ContainerStarted","Data":"95acc0926bceeedfcee66e581ce53cb6895a408916214f31341f20cf1f602f38"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.462094 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" event={"ID":"f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68","Type":"ContainerStarted","Data":"2ef2988dfc45837eb8120f38b60773ae94d732e4424e262ee4951fc2ea654d00"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.462118 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" event={"ID":"f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68","Type":"ContainerStarted","Data":"067a0b4eb903d63206030e9a29ac1ef30ca3283d27be74c63c213b9359ed097e"}
Oct 08 07:30:51 crc kubenswrapper[4693]: E1008 07:30:51.464665 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb\\\"\"" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" podUID="f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68"
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.475836 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" event={"ID":"d3a3ae96-9b43-42ab-b688-95e141f326f4","Type":"ContainerStarted","Data":"287faf22418395201ceb8f8c3f6b12c82b8946a9fda87c896eb97b3cf32c0794"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.475876 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" event={"ID":"d3a3ae96-9b43-42ab-b688-95e141f326f4","Type":"ContainerStarted","Data":"a0937286278fa40c29eed721cba1889494e5df94446a144567046bf2a7ef1413"}
Oct 08 07:30:51 crc kubenswrapper[4693]: E1008 07:30:51.478409 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" podUID="d3a3ae96-9b43-42ab-b688-95e141f326f4"
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.489452 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw" event={"ID":"5bfb052c-4d4a-47df-bb42-25424b56cb92","Type":"ContainerStarted","Data":"6eccb83939b5b6dc9d2cc01123901a0a95ff105c76014795130fbd24de86b214"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.489488 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw" event={"ID":"5bfb052c-4d4a-47df-bb42-25424b56cb92","Type":"ContainerStarted","Data":"b6adfec92f04617e0d5aad8791c76138e0088b7ee92f112abf245cefa3ebeab6"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.489500 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw" event={"ID":"5bfb052c-4d4a-47df-bb42-25424b56cb92","Type":"ContainerStarted","Data":"16c2a4b6c1e5aaf9b2f8eed53e4ba289d112a71f32087a6a42a9b7a905cfebb5"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.490766 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw"
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.491695 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq" event={"ID":"6772aabf-b5fa-4fc7-8925-0926ed242e9b","Type":"ContainerStarted","Data":"c1996a131dd6a9b3347515f968ee66771de1988adbf08a51a5d0e637adc54cf0"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.494305 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd" event={"ID":"9d40d3d9-711e-461b-b859-684b1af38ee9","Type":"ContainerStarted","Data":"d2819aaebf387dea3468b637ac40091aa99ad82ad2ea683003b30d2ab02cc50f"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.496728 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw" event={"ID":"b2a1cce0-35c1-46ed-b375-bb70c8a7c15f","Type":"ContainerStarted","Data":"46b273254ebef36abd025520c6bc6dc15210b73b60e220fb9a6413f0e87b13a3"}
Oct 08 07:30:51 crc kubenswrapper[4693]: E1008 07:30:51.502256 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw" podUID="b2a1cce0-35c1-46ed-b375-bb70c8a7c15f"
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.503335 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" event={"ID":"78a7e32f-67dc-454f-b65c-8a8a2605d139","Type":"ContainerStarted","Data":"6a617b12933a0b7ddaae77d0aa2f71a171ad038e99ea24223fa1afc2f47add26"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.506436 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz" event={"ID":"422c537a-d341-45ac-ac02-3fb221b66ed4","Type":"ContainerStarted","Data":"67c1a78d2ca4b43d8fbbec43b7c4b620635ec9f4a1ed8808c27f111cfb702631"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.509837 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dzwl8" podUID="17985107-f4e4-4b3d-97d0-640d4d7134ea" containerName="registry-server" containerID="cri-o://82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088" gracePeriod=2
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.510643 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" event={"ID":"d96bb98a-f416-4d93-b145-37632210f2f8","Type":"ContainerStarted","Data":"5cca6eee618d65da818a47a02d3972550219db471e9f694dcec267943a553833"}
Oct 08 07:30:51 crc kubenswrapper[4693]: I1008 07:30:51.510663 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" event={"ID":"d96bb98a-f416-4d93-b145-37632210f2f8","Type":"ContainerStarted","Data":"00fa36c2a1a3216aa0bbbbf0092ea5c66ced2dc03ffda0ea20f63c1300a03c01"}
Oct 08 07:30:51 crc kubenswrapper[4693]: E1008 07:30:51.511298 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:785670b14b19ffd7e0799dcf3e3e275329fa822d4a604eace09574f8bb1f8162\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" podUID="d96bb98a-f416-4d93-b145-37632210f2f8"
podStartE2EDuration="2.545637751s" podCreationTimestamp="2025-10-08 07:30:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:30:51.537629889 +0000 UTC m=+836.908594824" watchObservedRunningTime="2025-10-08 07:30:51.545637751 +0000 UTC m=+836.916602686" Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.072096 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.101155 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17985107-f4e4-4b3d-97d0-640d4d7134ea-utilities\") pod \"17985107-f4e4-4b3d-97d0-640d4d7134ea\" (UID: \"17985107-f4e4-4b3d-97d0-640d4d7134ea\") " Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.101216 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17985107-f4e4-4b3d-97d0-640d4d7134ea-catalog-content\") pod \"17985107-f4e4-4b3d-97d0-640d4d7134ea\" (UID: \"17985107-f4e4-4b3d-97d0-640d4d7134ea\") " Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.101255 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7vwb\" (UniqueName: \"kubernetes.io/projected/17985107-f4e4-4b3d-97d0-640d4d7134ea-kube-api-access-b7vwb\") pod \"17985107-f4e4-4b3d-97d0-640d4d7134ea\" (UID: \"17985107-f4e4-4b3d-97d0-640d4d7134ea\") " Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.102458 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17985107-f4e4-4b3d-97d0-640d4d7134ea-utilities" (OuterVolumeSpecName: "utilities") pod "17985107-f4e4-4b3d-97d0-640d4d7134ea" (UID: "17985107-f4e4-4b3d-97d0-640d4d7134ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.102719 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17985107-f4e4-4b3d-97d0-640d4d7134ea-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.129443 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17985107-f4e4-4b3d-97d0-640d4d7134ea-kube-api-access-b7vwb" (OuterVolumeSpecName: "kube-api-access-b7vwb") pod "17985107-f4e4-4b3d-97d0-640d4d7134ea" (UID: "17985107-f4e4-4b3d-97d0-640d4d7134ea"). InnerVolumeSpecName "kube-api-access-b7vwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.179435 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17985107-f4e4-4b3d-97d0-640d4d7134ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "17985107-f4e4-4b3d-97d0-640d4d7134ea" (UID: "17985107-f4e4-4b3d-97d0-640d4d7134ea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.203568 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17985107-f4e4-4b3d-97d0-640d4d7134ea-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.203594 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7vwb\" (UniqueName: \"kubernetes.io/projected/17985107-f4e4-4b3d-97d0-640d4d7134ea-kube-api-access-b7vwb\") on node \"crc\" DevicePath \"\"" Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.525608 4693 generic.go:334] "Generic (PLEG): container finished" podID="17985107-f4e4-4b3d-97d0-640d4d7134ea" containerID="82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088" exitCode=0 Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.525682 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dzwl8" Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.525689 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzwl8" event={"ID":"17985107-f4e4-4b3d-97d0-640d4d7134ea","Type":"ContainerDied","Data":"82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088"} Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.525847 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzwl8" event={"ID":"17985107-f4e4-4b3d-97d0-640d4d7134ea","Type":"ContainerDied","Data":"7bc21121178d90fbb24aae54bd749ec999bdbc875c9335632ce45f84f8e06105"} Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.525891 4693 scope.go:117] "RemoveContainer" containerID="82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088" Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.533038 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" event={"ID":"a45ca91b-ddca-4c17-ab8b-d106345451d3","Type":"ContainerStarted","Data":"7760b2c98a1e6f4f28e45b0197433b5de1cd4b294e88db577d058e388db8cf3c"} Oct 08 07:30:52 crc kubenswrapper[4693]: E1008 07:30:52.534558 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb\\\"\"" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" podUID="f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68" Oct 08 07:30:52 crc kubenswrapper[4693]: E1008 07:30:52.536115 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw" podUID="b2a1cce0-35c1-46ed-b375-bb70c8a7c15f" Oct 08 07:30:52 crc kubenswrapper[4693]: E1008 07:30:52.536830 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:dfd044635f9df9ed1d249387fa622177db35cdc72475e1c570617b8d17c64862\\\"\"" 
pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" podUID="3fedcb35-9741-40ee-bdb0-a1d78a5da3e6" Oct 08 07:30:52 crc kubenswrapper[4693]: E1008 07:30:52.536880 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:785670b14b19ffd7e0799dcf3e3e275329fa822d4a604eace09574f8bb1f8162\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" podUID="d96bb98a-f416-4d93-b145-37632210f2f8" Oct 08 07:30:52 crc kubenswrapper[4693]: E1008 07:30:52.536961 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:64f57b2b59dea2bd9fae91490c5bec2687131884a049e6579819d9f951b877c6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" podUID="09c1a297-4a54-430a-a78e-134db76611b9" Oct 08 07:30:52 crc kubenswrapper[4693]: E1008 07:30:52.537493 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" podUID="d3a3ae96-9b43-42ab-b688-95e141f326f4" Oct 08 07:30:52 crc kubenswrapper[4693]: E1008 07:30:52.537589 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" podUID="6fc858ec-6edd-4e45-ba44-fe2ea26a0614" Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.604978 4693 scope.go:117] "RemoveContainer" containerID="d36d725571806fcbd7495afea21b1cc0e54dce2051ceaa04569147af3ab4640c" Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.643508 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dzwl8"] Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.650595 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dzwl8"] Oct 08 07:30:52 crc kubenswrapper[4693]: I1008 07:30:52.656058 4693 scope.go:117] "RemoveContainer" containerID="2b12cf3fb62fcbe4524754620526ed05985d9505931acf9495733dd1ce721ff9" Oct 08 07:30:53 crc kubenswrapper[4693]: I1008 07:30:53.266107 4693 scope.go:117] "RemoveContainer" containerID="82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088" Oct 08 07:30:53 crc kubenswrapper[4693]: E1008 07:30:53.266567 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088\": container with ID starting with 82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088 not found: ID does not exist" containerID="82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088" Oct 08 07:30:53 crc kubenswrapper[4693]: I1008 07:30:53.266613 4693 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088"} err="failed to get container status \"82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088\": rpc error: code = NotFound desc = could not find container \"82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088\": container with ID starting with 82dd74807185c376810a4db00d1272a359804b67e759537b2d43df1bcb363088 not found: ID does not exist" Oct 08 07:30:53 crc kubenswrapper[4693]: I1008 07:30:53.266640 4693 scope.go:117] "RemoveContainer" containerID="d36d725571806fcbd7495afea21b1cc0e54dce2051ceaa04569147af3ab4640c" Oct 08 07:30:53 crc kubenswrapper[4693]: E1008 07:30:53.268054 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d36d725571806fcbd7495afea21b1cc0e54dce2051ceaa04569147af3ab4640c\": container with ID starting with d36d725571806fcbd7495afea21b1cc0e54dce2051ceaa04569147af3ab4640c not found: ID does not exist" containerID="d36d725571806fcbd7495afea21b1cc0e54dce2051ceaa04569147af3ab4640c" Oct 08 07:30:53 crc kubenswrapper[4693]: I1008 07:30:53.268085 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d36d725571806fcbd7495afea21b1cc0e54dce2051ceaa04569147af3ab4640c"} err="failed to get container status \"d36d725571806fcbd7495afea21b1cc0e54dce2051ceaa04569147af3ab4640c\": rpc error: code = NotFound desc = could not find container \"d36d725571806fcbd7495afea21b1cc0e54dce2051ceaa04569147af3ab4640c\": container with ID starting with d36d725571806fcbd7495afea21b1cc0e54dce2051ceaa04569147af3ab4640c not found: ID does not exist" Oct 08 07:30:53 crc kubenswrapper[4693]: I1008 07:30:53.268106 4693 scope.go:117] "RemoveContainer" containerID="2b12cf3fb62fcbe4524754620526ed05985d9505931acf9495733dd1ce721ff9" Oct 08 07:30:53 crc kubenswrapper[4693]: E1008 07:30:53.268345 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b12cf3fb62fcbe4524754620526ed05985d9505931acf9495733dd1ce721ff9\": container with ID starting with 2b12cf3fb62fcbe4524754620526ed05985d9505931acf9495733dd1ce721ff9 not found: ID does not exist" containerID="2b12cf3fb62fcbe4524754620526ed05985d9505931acf9495733dd1ce721ff9" Oct 08 07:30:53 crc kubenswrapper[4693]: I1008 07:30:53.268420 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b12cf3fb62fcbe4524754620526ed05985d9505931acf9495733dd1ce721ff9"} err="failed to get container status \"2b12cf3fb62fcbe4524754620526ed05985d9505931acf9495733dd1ce721ff9\": rpc error: code = NotFound desc = could not find container \"2b12cf3fb62fcbe4524754620526ed05985d9505931acf9495733dd1ce721ff9\": container with ID starting with 2b12cf3fb62fcbe4524754620526ed05985d9505931acf9495733dd1ce721ff9 not found: ID does not exist" Oct 08 07:30:53 crc kubenswrapper[4693]: I1008 07:30:53.371334 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17985107-f4e4-4b3d-97d0-640d4d7134ea" path="/var/lib/kubelet/pods/17985107-f4e4-4b3d-97d0-640d4d7134ea/volumes" Oct 08 07:30:59 crc kubenswrapper[4693]: I1008 07:30:59.840711 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7f66b9c549-m8hmw" Oct 08 07:31:02 crc kubenswrapper[4693]: E1008 07:31:02.963300 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled 
Oct 08 07:31:02 crc kubenswrapper[4693]: E1008 07:31:02.963300 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed"
Oct 08 07:31:02 crc kubenswrapper[4693]: E1008 07:31:02.963714 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4hclh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-6859f9b676-7sw5d_openstack-operators(78a7e32f-67dc-454f-b65c-8a8a2605d139): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 08 07:31:03 crc kubenswrapper[4693]: E1008 07:31:03.531980 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" podUID="78a7e32f-67dc-454f-b65c-8a8a2605d139"
Oct 08 07:31:03 crc kubenswrapper[4693]: I1008 07:31:03.628484 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz" event={"ID":"422c537a-d341-45ac-ac02-3fb221b66ed4","Type":"ContainerStarted","Data":"1eef5cea5329fb7fc3e1350b42489cc78d7a2b56e9b441864687717bd8782372"}
Oct 08 07:31:03 crc kubenswrapper[4693]: I1008 07:31:03.630246 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn" event={"ID":"b855e40c-e0b0-4322-8099-d4e51c0b92f1","Type":"ContainerStarted","Data":"226c0efa84a8fd2e1fcb296f333ab99e5fdc9010b4977354ae604ac837ba7344"}
Oct 08 07:31:03 crc kubenswrapper[4693]: I1008 07:31:03.631069 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42" event={"ID":"cd94a973-75b2-4722-a298-16e6bd67aa61","Type":"ContainerStarted","Data":"142c54547ddf1156f6df0529d0a02ce7b9d4c28958817899c72d8e92568c04bf"}
Oct 08 07:31:03 crc kubenswrapper[4693]: I1008 07:31:03.632167 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" event={"ID":"78a7e32f-67dc-454f-b65c-8a8a2605d139","Type":"ContainerStarted","Data":"840166fac37f0aa5e57e7aa61111065b8dde8d5e5c21109c6f264d6f6b9d0924"}
Oct 08 07:31:03 crc kubenswrapper[4693]: E1008 07:31:03.633700 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" podUID="78a7e32f-67dc-454f-b65c-8a8a2605d139"
Oct 08 07:31:03 crc kubenswrapper[4693]: I1008 07:31:03.634259 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7" event={"ID":"2f8dab68-da73-412a-bf83-95f2ac37f289","Type":"ContainerStarted","Data":"6685c7f3a4b9c5209c19037abdb0de13c809e9b7ae0a44e3a178e641e5201007"}
Oct 08 07:31:03 crc kubenswrapper[4693]: I1008 07:31:03.637647 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd" event={"ID":"9d40d3d9-711e-461b-b859-684b1af38ee9","Type":"ContainerStarted","Data":"c2830adbcb72485e2ab5d975a951fabee671cf0d07d484877043b3d4de2350f9"}
Oct 08 07:31:03 crc kubenswrapper[4693]: I1008 07:31:03.639756 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk" event={"ID":"b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f","Type":"ContainerStarted","Data":"2c61f0b237bc3091838a3d3770867ba506ae08ca3de12d1210f49b5a6cea5525"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.647587 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" event={"ID":"a45ca91b-ddca-4c17-ab8b-d106345451d3","Type":"ContainerStarted","Data":"d6e86cc11f4445f6a61ef74fdc92dc26284eacb0cc2071edf6ecffa53a368171"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.647846 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" event={"ID":"a45ca91b-ddca-4c17-ab8b-d106345451d3","Type":"ContainerStarted","Data":"928d15eefe5bfc38816cb20914c435f5cff24db8713972f96cd2d68d7d264e5c"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.647894 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.649191 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z" event={"ID":"a8912110-fa72-4e6b-9c38-7b62b34772fa","Type":"ContainerStarted","Data":"131ac378059c038caf07f437b7aab1834cd0cb577e17120be7f54e027a037e83"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.649234 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z" event={"ID":"a8912110-fa72-4e6b-9c38-7b62b34772fa","Type":"ContainerStarted","Data":"c08c63f70e50392dc5000fcf0aee4abd711f0050db67b9a1c2ee7afaaa1397f4"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.649339 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.650366 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq" event={"ID":"6772aabf-b5fa-4fc7-8925-0926ed242e9b","Type":"ContainerStarted","Data":"85ec180d35473987f0a7664b666326bc0c9b7e4d511da178e0d847985825b536"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.650389 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq" event={"ID":"6772aabf-b5fa-4fc7-8925-0926ed242e9b","Type":"ContainerStarted","Data":"708ad7510ab01340f560b3eee1a1100b980278c7136433f4fc34376a5a8243ec"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.650712 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.651955 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn" event={"ID":"b855e40c-e0b0-4322-8099-d4e51c0b92f1","Type":"ContainerStarted","Data":"23863f3c7f25ebd54a7c26f0224ccb6bf60e5c238f5270b5e7a741f65430db0d"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.652284 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.653626 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz" event={"ID":"bfc23a1a-faab-44e8-91f7-29d4e95f0fdc","Type":"ContainerStarted","Data":"6e0f30f913eca803f92cba6aceb9185e04cf573869ecf4cfe9cc6af1dc060dd4"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.653650 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz" event={"ID":"bfc23a1a-faab-44e8-91f7-29d4e95f0fdc","Type":"ContainerStarted","Data":"a3267a39698b3fcbaccce8407c05ced4e955a40b9eaac7345c603686e2a77551"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.653754 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.655252 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42" event={"ID":"cd94a973-75b2-4722-a298-16e6bd67aa61","Type":"ContainerStarted","Data":"47045df806b98c56af1d0caa54aa15882ce53aba288cb6a409171745f829517d"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.655376 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.656994 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz" event={"ID":"422c537a-d341-45ac-ac02-3fb221b66ed4","Type":"ContainerStarted","Data":"d42b8d33742d83e208a88642f9527c0ed37200273ce8c04c4518bb0bf3d95d88"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.657117 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.658590 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd" event={"ID":"9d40d3d9-711e-461b-b859-684b1af38ee9","Type":"ContainerStarted","Data":"6854d62e0f6bcf6cccb87f9b80af1b1c75f988ec110366ef58a4ffb167f98877"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.658691 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.660014 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r" event={"ID":"cba353e7-9050-4433-a6b6-2ca4f67d077a","Type":"ContainerStarted","Data":"9bcaab4432f0216ce92dcf8b0e6e3ad6349cd0efc4a5c19d83986e502d2c3991"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.660050 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r" event={"ID":"cba353e7-9050-4433-a6b6-2ca4f67d077a","Type":"ContainerStarted","Data":"7f527bd283c3c76251f08ff99a4c4cfeff6a4687c33f477adec5574a69a5bdcf"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.660110 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.661644 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk" event={"ID":"81212063-ccc7-423c-b817-60f7280ee4f9","Type":"ContainerStarted","Data":"9062e4b8ec0aeece4872a8a07ff98b252bf3b173feaaf1db8615d43da5160f7a"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.661671 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk" event={"ID":"81212063-ccc7-423c-b817-60f7280ee4f9","Type":"ContainerStarted","Data":"a6410f52ca3d250e2b4cc964fc2296c8d2ad8aadbf9ec040649c8811076cab4a"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.661769 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.663190 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk" event={"ID":"b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f","Type":"ContainerStarted","Data":"8b3456f965c283706c7ede1ffe3ec989f76244eccf81f8acf61ab9fbeee2fcea"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.663556 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.664917 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd" event={"ID":"72f3f2ae-ba07-4045-9ac4-fc4f0dee2682","Type":"ContainerStarted","Data":"d253e7defa498559542d14ac98f9bad207d019ae2d9d096c6ffbf5369c209f98"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.664938 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd" event={"ID":"72f3f2ae-ba07-4045-9ac4-fc4f0dee2682","Type":"ContainerStarted","Data":"cec04787e431bfcbb60de674eb720781d0fff3db6d2da18635e1a92ed1fe268b"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.666015 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.667173 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7" event={"ID":"2f8dab68-da73-412a-bf83-95f2ac37f289","Type":"ContainerStarted","Data":"ef02cf746cb1be91ddfc5e504bddf050ccecf2c6ae0909212bf9f1d5a45b5cd0"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.667504 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.668865 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x" event={"ID":"d26d490a-dba0-46d4-b636-836a4dde53be","Type":"ContainerStarted","Data":"be033e91ffbff2065f62303c44dde330d841cabdf1e2cd530f46f300d8d4182a"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.668886 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x" event={"ID":"d26d490a-dba0-46d4-b636-836a4dde53be","Type":"ContainerStarted","Data":"97c4250b7d93973098c493bda3db035ca5b65f663f62c24f1f4d8e4d9e4eb696"}
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.669169 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x"
Oct 08 07:31:04 crc kubenswrapper[4693]: E1008 07:31:04.669668 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" podUID="78a7e32f-67dc-454f-b65c-8a8a2605d139"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.686451 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" podStartSLOduration=5.020845651 podStartE2EDuration="16.686431938s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:51.410471555 +0000 UTC m=+836.781436490" lastFinishedPulling="2025-10-08 07:31:03.076057842 +0000 UTC m=+848.447022777" observedRunningTime="2025-10-08 07:31:04.683925566 +0000 UTC m=+850.054890501" watchObservedRunningTime="2025-10-08 07:31:04.686431938 +0000 UTC m=+850.057396873"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.708144 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk" podStartSLOduration=3.875713889 podStartE2EDuration="16.708126083s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.244429078 +0000 UTC m=+835.615394013" lastFinishedPulling="2025-10-08 07:31:03.076841272 +0000 UTC m=+848.447806207" observedRunningTime="2025-10-08 07:31:04.706967884 +0000 UTC m=+850.077932819" watchObservedRunningTime="2025-10-08 07:31:04.708126083 +0000 UTC m=+850.079091018"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.728652 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z" podStartSLOduration=4.370177469 podStartE2EDuration="16.728635609s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.702202803 +0000 UTC m=+836.073167738" lastFinishedPulling="2025-10-08 07:31:03.060660943 +0000 UTC m=+848.431625878" observedRunningTime="2025-10-08 07:31:04.723649076 +0000 UTC m=+850.094614011" watchObservedRunningTime="2025-10-08 07:31:04.728635609 +0000 UTC m=+850.099600544"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.749116 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz" podStartSLOduration=3.970548641 podStartE2EDuration="16.749101153s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.257494884 +0000 UTC m=+835.628459819" lastFinishedPulling="2025-10-08 07:31:03.036047356 +0000 UTC m=+848.407012331" observedRunningTime="2025-10-08 07:31:04.743365472 +0000 UTC m=+850.114330417" watchObservedRunningTime="2025-10-08 07:31:04.749101153 +0000 UTC m=+850.120066088"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.799070 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r" podStartSLOduration=4.415748921 podStartE2EDuration="16.799052035s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.647332328 +0000 UTC m=+836.018297273" lastFinishedPulling="2025-10-08 07:31:03.030635412 +0000 UTC m=+848.401600387" observedRunningTime="2025-10-08 07:31:04.769176178 +0000 UTC m=+850.140141113" watchObservedRunningTime="2025-10-08 07:31:04.799052035 +0000 UTC m=+850.170016970"
Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.800749 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz" podStartSLOduration=4.463542598 podStartE2EDuration="16.800742527s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.694966261 +0000 UTC m=+836.065931196" lastFinishedPulling="2025-10-08 07:31:03.03216619 +0000 UTC m=+848.403131125" observedRunningTime="2025-10-08 07:31:04.794855872 +0000 UTC m=+850.165820807" watchObservedRunningTime="2025-10-08 07:31:04.800742527 +0000 UTC m=+850.171707462"
pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq" podStartSLOduration=4.495148527 podStartE2EDuration="16.830977313s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.694628442 +0000 UTC m=+836.065593377" lastFinishedPulling="2025-10-08 07:31:03.030457228 +0000 UTC m=+848.401422163" observedRunningTime="2025-10-08 07:31:04.819068919 +0000 UTC m=+850.190033854" watchObservedRunningTime="2025-10-08 07:31:04.830977313 +0000 UTC m=+850.201942248" Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.831533 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd" podStartSLOduration=4.498244164 podStartE2EDuration="16.831526496s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.694568811 +0000 UTC m=+836.065533746" lastFinishedPulling="2025-10-08 07:31:03.027851143 +0000 UTC m=+848.398816078" observedRunningTime="2025-10-08 07:31:04.829798354 +0000 UTC m=+850.200763289" watchObservedRunningTime="2025-10-08 07:31:04.831526496 +0000 UTC m=+850.202491431" Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.876269 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x" podStartSLOduration=4.078871873 podStartE2EDuration="16.876254869s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.235879961 +0000 UTC m=+835.606844936" lastFinishedPulling="2025-10-08 07:31:03.033262957 +0000 UTC m=+848.404227932" observedRunningTime="2025-10-08 07:31:04.87426968 +0000 UTC m=+850.245234615" watchObservedRunningTime="2025-10-08 07:31:04.876254869 +0000 UTC m=+850.247219804" Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.888011 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk" podStartSLOduration=4.55957391 podStartE2EDuration="16.887995989s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.701924346 +0000 UTC m=+836.072889281" lastFinishedPulling="2025-10-08 07:31:03.030346425 +0000 UTC m=+848.401311360" observedRunningTime="2025-10-08 07:31:04.887183959 +0000 UTC m=+850.258148894" watchObservedRunningTime="2025-10-08 07:31:04.887995989 +0000 UTC m=+850.258960924" Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.924243 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7" podStartSLOduration=4.132826171 podStartE2EDuration="16.924229933s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.245132666 +0000 UTC m=+835.616097601" lastFinishedPulling="2025-10-08 07:31:03.036536428 +0000 UTC m=+848.407501363" observedRunningTime="2025-10-08 07:31:04.92209718 +0000 UTC m=+850.293062115" watchObservedRunningTime="2025-10-08 07:31:04.924229933 +0000 UTC m=+850.295194868" Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.924781 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn" podStartSLOduration=3.524410908 podStartE2EDuration="16.924776036s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:49.630749776 +0000 UTC 
m=+835.001714711" lastFinishedPulling="2025-10-08 07:31:03.031114904 +0000 UTC m=+848.402079839" observedRunningTime="2025-10-08 07:31:04.911333065 +0000 UTC m=+850.282298000" watchObservedRunningTime="2025-10-08 07:31:04.924776036 +0000 UTC m=+850.295740971" Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.964931 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42" podStartSLOduration=3.886023829 podStartE2EDuration="16.964915436s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:49.953290883 +0000 UTC m=+835.324255818" lastFinishedPulling="2025-10-08 07:31:03.03218249 +0000 UTC m=+848.403147425" observedRunningTime="2025-10-08 07:31:04.946930062 +0000 UTC m=+850.317894997" watchObservedRunningTime="2025-10-08 07:31:04.964915436 +0000 UTC m=+850.335880371" Oct 08 07:31:04 crc kubenswrapper[4693]: I1008 07:31:04.965067 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd" podStartSLOduration=4.568630912 podStartE2EDuration="16.96506465s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.663745313 +0000 UTC m=+836.034710258" lastFinishedPulling="2025-10-08 07:31:03.060179061 +0000 UTC m=+848.431143996" observedRunningTime="2025-10-08 07:31:04.964223059 +0000 UTC m=+850.335187994" watchObservedRunningTime="2025-10-08 07:31:04.96506465 +0000 UTC m=+850.336029585" Oct 08 07:31:09 crc kubenswrapper[4693]: I1008 07:31:09.000456 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-5tq42" Oct 08 07:31:09 crc kubenswrapper[4693]: I1008 07:31:09.019478 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-d2ztn" Oct 08 07:31:09 crc kubenswrapper[4693]: I1008 07:31:09.088233 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-dmgt7" Oct 08 07:31:09 crc kubenswrapper[4693]: I1008 07:31:09.105568 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-hbqzd" Oct 08 07:31:09 crc kubenswrapper[4693]: I1008 07:31:09.118220 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-h25pk" Oct 08 07:31:09 crc kubenswrapper[4693]: I1008 07:31:09.122731 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-njkdz" Oct 08 07:31:09 crc kubenswrapper[4693]: I1008 07:31:09.208696 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-vqp2x" Oct 08 07:31:09 crc kubenswrapper[4693]: I1008 07:31:09.308384 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-blmhk" Oct 08 07:31:09 crc kubenswrapper[4693]: I1008 07:31:09.321403 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-65sjq" Oct 08 07:31:09 crc kubenswrapper[4693]: 
I1008 07:31:09.411700 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-lhs5r" Oct 08 07:31:09 crc kubenswrapper[4693]: I1008 07:31:09.424236 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-4zvpz" Oct 08 07:31:09 crc kubenswrapper[4693]: I1008 07:31:09.451296 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-jtx9z" Oct 08 07:31:09 crc kubenswrapper[4693]: I1008 07:31:09.469660 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-h2npd" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.718059 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" event={"ID":"d3a3ae96-9b43-42ab-b688-95e141f326f4","Type":"ContainerStarted","Data":"37b53dd4d2135f8f01093c797e525997a0dbfbd932a7cc01c8951056a31dd236"} Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.718674 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.720440 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw" event={"ID":"b2a1cce0-35c1-46ed-b375-bb70c8a7c15f","Type":"ContainerStarted","Data":"0de6e088376774f0dbb37d5e2ec9cc6bc44498c3878b6d5510b1b8e8deb0033f"} Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.723032 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" event={"ID":"d96bb98a-f416-4d93-b145-37632210f2f8","Type":"ContainerStarted","Data":"9453d34ca046207bf6d26112f6915d55d3f05a16a203775f91b44cbd0eb68c49"} Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.723199 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.724861 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" event={"ID":"6fc858ec-6edd-4e45-ba44-fe2ea26a0614","Type":"ContainerStarted","Data":"81cf872df9515d27efa7913683486e4dc92864a044c46d2d98b3d69c678a3039"} Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.725085 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.740889 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" event={"ID":"f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68","Type":"ContainerStarted","Data":"f6d31dd1433cf2d048f3cf9f72a21c8473ebd14af2e9f3fe18be3fbe6e53c907"} Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.741481 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.742941 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" podStartSLOduration=4.054145164 podStartE2EDuration="21.742915017s" podCreationTimestamp="2025-10-08 07:30:49 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.715762723 +0000 UTC m=+836.086727658" lastFinishedPulling="2025-10-08 07:31:08.404532576 +0000 UTC m=+853.775497511" observedRunningTime="2025-10-08 07:31:10.735237208 +0000 UTC m=+856.106202163" watchObservedRunningTime="2025-10-08 07:31:10.742915017 +0000 UTC m=+856.113879962" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.747167 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" event={"ID":"3fedcb35-9741-40ee-bdb0-a1d78a5da3e6","Type":"ContainerStarted","Data":"4221c658b1f139ce85096b2a5d5e192a1541e632d307cca886bfd9efcedbb305"} Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.747739 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.750894 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" event={"ID":"09c1a297-4a54-430a-a78e-134db76611b9","Type":"ContainerStarted","Data":"11f45e411b4d38e025a28b55aaaf76cb3866db193a79b2ccb8d537c83fde6fc6"} Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.751128 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.758198 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" podStartSLOduration=3.798619621 podStartE2EDuration="22.758186704s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.727442093 +0000 UTC m=+836.098407028" lastFinishedPulling="2025-10-08 07:31:09.687009176 +0000 UTC m=+855.057974111" observedRunningTime="2025-10-08 07:31:10.755393795 +0000 UTC m=+856.126358730" watchObservedRunningTime="2025-10-08 07:31:10.758186704 +0000 UTC m=+856.129151639" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.773720 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw" podStartSLOduration=2.8135711199999998 podStartE2EDuration="21.773702677s" podCreationTimestamp="2025-10-08 07:30:49 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.716195375 +0000 UTC m=+836.087160320" lastFinishedPulling="2025-10-08 07:31:09.676326942 +0000 UTC m=+855.047291877" observedRunningTime="2025-10-08 07:31:10.771277337 +0000 UTC m=+856.142242272" watchObservedRunningTime="2025-10-08 07:31:10.773702677 +0000 UTC m=+856.144667612" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.810629 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" podStartSLOduration=2.912352746 podStartE2EDuration="22.810611347s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.702313056 +0000 UTC m=+836.073277991" lastFinishedPulling="2025-10-08 07:31:10.600571637 +0000 UTC m=+855.971536592" observedRunningTime="2025-10-08 07:31:10.789435025 +0000 UTC m=+856.160399960" 
watchObservedRunningTime="2025-10-08 07:31:10.810611347 +0000 UTC m=+856.181576282" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.820681 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" podStartSLOduration=2.859268533 podStartE2EDuration="21.820658125s" podCreationTimestamp="2025-10-08 07:30:49 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.735663891 +0000 UTC m=+836.106628826" lastFinishedPulling="2025-10-08 07:31:09.697053483 +0000 UTC m=+855.068018418" observedRunningTime="2025-10-08 07:31:10.814530384 +0000 UTC m=+856.185495339" watchObservedRunningTime="2025-10-08 07:31:10.820658125 +0000 UTC m=+856.191623060" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.834714 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" podStartSLOduration=2.8645156050000002 podStartE2EDuration="21.834684791s" podCreationTimestamp="2025-10-08 07:30:49 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.726625181 +0000 UTC m=+836.097590116" lastFinishedPulling="2025-10-08 07:31:09.696794347 +0000 UTC m=+855.067759302" observedRunningTime="2025-10-08 07:31:10.829580175 +0000 UTC m=+856.200545110" watchObservedRunningTime="2025-10-08 07:31:10.834684791 +0000 UTC m=+856.205649776" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.852308 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" podStartSLOduration=3.883519695 podStartE2EDuration="22.852293645s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.727602947 +0000 UTC m=+836.098567882" lastFinishedPulling="2025-10-08 07:31:09.696376897 +0000 UTC m=+855.067341832" observedRunningTime="2025-10-08 07:31:10.844247767 +0000 UTC m=+856.215212722" watchObservedRunningTime="2025-10-08 07:31:10.852293645 +0000 UTC m=+856.223258580" Oct 08 07:31:10 crc kubenswrapper[4693]: I1008 07:31:10.929751 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w" Oct 08 07:31:19 crc kubenswrapper[4693]: I1008 07:31:19.199693 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-649675d675-w96xx" Oct 08 07:31:19 crc kubenswrapper[4693]: I1008 07:31:19.386562 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-567tm" Oct 08 07:31:19 crc kubenswrapper[4693]: I1008 07:31:19.479188 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-6vt47" Oct 08 07:31:19 crc kubenswrapper[4693]: I1008 07:31:19.509983 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-6h28f" Oct 08 07:31:19 crc kubenswrapper[4693]: I1008 07:31:19.602326 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-9bz2k" Oct 08 07:31:19 crc kubenswrapper[4693]: I1008 07:31:19.660512 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-h74m5" Oct 08 07:31:21 crc kubenswrapper[4693]: I1008 07:31:21.850778 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" event={"ID":"78a7e32f-67dc-454f-b65c-8a8a2605d139","Type":"ContainerStarted","Data":"b599fff3898d3d8fceaffa90aac203ec3fcf3a4f208dbd31e11e4d1ae64fce98"} Oct 08 07:31:21 crc kubenswrapper[4693]: I1008 07:31:21.851710 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" Oct 08 07:31:21 crc kubenswrapper[4693]: I1008 07:31:21.870509 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" podStartSLOduration=3.368732524 podStartE2EDuration="33.870489651s" podCreationTimestamp="2025-10-08 07:30:48 +0000 UTC" firstStartedPulling="2025-10-08 07:30:50.688087499 +0000 UTC m=+836.059052454" lastFinishedPulling="2025-10-08 07:31:21.189844606 +0000 UTC m=+866.560809581" observedRunningTime="2025-10-08 07:31:21.870112842 +0000 UTC m=+867.241077807" watchObservedRunningTime="2025-10-08 07:31:21.870489651 +0000 UTC m=+867.241454616" Oct 08 07:31:23 crc kubenswrapper[4693]: I1008 07:31:23.489683 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:31:23 crc kubenswrapper[4693]: I1008 07:31:23.489742 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:31:29 crc kubenswrapper[4693]: I1008 07:31:29.492036 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-7sw5d" Oct 08 07:31:47 crc kubenswrapper[4693]: I1008 07:31:47.974439 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-m4dbh"] Oct 08 07:31:47 crc kubenswrapper[4693]: E1008 07:31:47.977972 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17985107-f4e4-4b3d-97d0-640d4d7134ea" containerName="extract-utilities" Oct 08 07:31:47 crc kubenswrapper[4693]: I1008 07:31:47.977992 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="17985107-f4e4-4b3d-97d0-640d4d7134ea" containerName="extract-utilities" Oct 08 07:31:47 crc kubenswrapper[4693]: E1008 07:31:47.978009 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17985107-f4e4-4b3d-97d0-640d4d7134ea" containerName="extract-content" Oct 08 07:31:47 crc kubenswrapper[4693]: I1008 07:31:47.978018 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="17985107-f4e4-4b3d-97d0-640d4d7134ea" containerName="extract-content" Oct 08 07:31:47 crc kubenswrapper[4693]: E1008 07:31:47.978051 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17985107-f4e4-4b3d-97d0-640d4d7134ea" containerName="registry-server" Oct 08 07:31:47 crc kubenswrapper[4693]: I1008 07:31:47.978059 4693 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="17985107-f4e4-4b3d-97d0-640d4d7134ea" containerName="registry-server" Oct 08 07:31:47 crc kubenswrapper[4693]: I1008 07:31:47.978264 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="17985107-f4e4-4b3d-97d0-640d4d7134ea" containerName="registry-server" Oct 08 07:31:47 crc kubenswrapper[4693]: I1008 07:31:47.979355 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh" Oct 08 07:31:47 crc kubenswrapper[4693]: I1008 07:31:47.983626 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 08 07:31:47 crc kubenswrapper[4693]: I1008 07:31:47.983711 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 08 07:31:47 crc kubenswrapper[4693]: I1008 07:31:47.983916 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-xkdwz" Oct 08 07:31:47 crc kubenswrapper[4693]: I1008 07:31:47.983972 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 08 07:31:47 crc kubenswrapper[4693]: I1008 07:31:47.986909 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-m4dbh"] Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.048763 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wtj2r"] Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.049936 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.054138 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.075625 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wtj2r"] Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.099629 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm9h2\" (UniqueName: \"kubernetes.io/projected/34179f0f-280a-4da3-a6d1-7b924b798ab2-kube-api-access-zm9h2\") pod \"dnsmasq-dns-675f4bcbfc-m4dbh\" (UID: \"34179f0f-280a-4da3-a6d1-7b924b798ab2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.099756 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34179f0f-280a-4da3-a6d1-7b924b798ab2-config\") pod \"dnsmasq-dns-675f4bcbfc-m4dbh\" (UID: \"34179f0f-280a-4da3-a6d1-7b924b798ab2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.200622 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm9h2\" (UniqueName: \"kubernetes.io/projected/34179f0f-280a-4da3-a6d1-7b924b798ab2-kube-api-access-zm9h2\") pod \"dnsmasq-dns-675f4bcbfc-m4dbh\" (UID: \"34179f0f-280a-4da3-a6d1-7b924b798ab2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.200663 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4gtp\" (UniqueName: \"kubernetes.io/projected/22f66159-7f80-41e9-8772-0c861d302ebc-kube-api-access-z4gtp\") pod \"dnsmasq-dns-78dd6ddcc-wtj2r\" (UID: \"22f66159-7f80-41e9-8772-0c861d302ebc\") " 
pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.200688 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22f66159-7f80-41e9-8772-0c861d302ebc-config\") pod \"dnsmasq-dns-78dd6ddcc-wtj2r\" (UID: \"22f66159-7f80-41e9-8772-0c861d302ebc\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.200729 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34179f0f-280a-4da3-a6d1-7b924b798ab2-config\") pod \"dnsmasq-dns-675f4bcbfc-m4dbh\" (UID: \"34179f0f-280a-4da3-a6d1-7b924b798ab2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.200748 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/22f66159-7f80-41e9-8772-0c861d302ebc-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-wtj2r\" (UID: \"22f66159-7f80-41e9-8772-0c861d302ebc\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.201506 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34179f0f-280a-4da3-a6d1-7b924b798ab2-config\") pod \"dnsmasq-dns-675f4bcbfc-m4dbh\" (UID: \"34179f0f-280a-4da3-a6d1-7b924b798ab2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.221378 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm9h2\" (UniqueName: \"kubernetes.io/projected/34179f0f-280a-4da3-a6d1-7b924b798ab2-kube-api-access-zm9h2\") pod \"dnsmasq-dns-675f4bcbfc-m4dbh\" (UID: \"34179f0f-280a-4da3-a6d1-7b924b798ab2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.297265 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.301799 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4gtp\" (UniqueName: \"kubernetes.io/projected/22f66159-7f80-41e9-8772-0c861d302ebc-kube-api-access-z4gtp\") pod \"dnsmasq-dns-78dd6ddcc-wtj2r\" (UID: \"22f66159-7f80-41e9-8772-0c861d302ebc\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.301864 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22f66159-7f80-41e9-8772-0c861d302ebc-config\") pod \"dnsmasq-dns-78dd6ddcc-wtj2r\" (UID: \"22f66159-7f80-41e9-8772-0c861d302ebc\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.301928 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/22f66159-7f80-41e9-8772-0c861d302ebc-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-wtj2r\" (UID: \"22f66159-7f80-41e9-8772-0c861d302ebc\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.302768 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/22f66159-7f80-41e9-8772-0c861d302ebc-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-wtj2r\" (UID: \"22f66159-7f80-41e9-8772-0c861d302ebc\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.303598 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22f66159-7f80-41e9-8772-0c861d302ebc-config\") pod \"dnsmasq-dns-78dd6ddcc-wtj2r\" (UID: \"22f66159-7f80-41e9-8772-0c861d302ebc\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.319892 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4gtp\" (UniqueName: \"kubernetes.io/projected/22f66159-7f80-41e9-8772-0c861d302ebc-kube-api-access-z4gtp\") pod \"dnsmasq-dns-78dd6ddcc-wtj2r\" (UID: \"22f66159-7f80-41e9-8772-0c861d302ebc\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.366396 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.552013 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-m4dbh"] Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.558925 4693 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 08 07:31:48 crc kubenswrapper[4693]: I1008 07:31:48.622843 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wtj2r"] Oct 08 07:31:48 crc kubenswrapper[4693]: W1008 07:31:48.637504 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22f66159_7f80_41e9_8772_0c861d302ebc.slice/crio-b69719aa360b2bce55aad2c4932bb04e0ed1c34699c8cf9a617e7968bf6a4209 WatchSource:0}: Error finding container b69719aa360b2bce55aad2c4932bb04e0ed1c34699c8cf9a617e7968bf6a4209: Status 404 returned error can't find the container with id b69719aa360b2bce55aad2c4932bb04e0ed1c34699c8cf9a617e7968bf6a4209 Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.102171 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" event={"ID":"22f66159-7f80-41e9-8772-0c861d302ebc","Type":"ContainerStarted","Data":"b69719aa360b2bce55aad2c4932bb04e0ed1c34699c8cf9a617e7968bf6a4209"} Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.103997 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh" event={"ID":"34179f0f-280a-4da3-a6d1-7b924b798ab2","Type":"ContainerStarted","Data":"31638e4f7ee80ff0f01563f64ee6ea28194739c2675149f520b4674bf917c8da"} Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.570177 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-m4dbh"] Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.606905 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-pd96z"] Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.607925 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-pd96z" Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.625084 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-pd96z"] Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.728350 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-dns-svc\") pod \"dnsmasq-dns-666b6646f7-pd96z\" (UID: \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\") " pod="openstack/dnsmasq-dns-666b6646f7-pd96z" Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.728472 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdp5f\" (UniqueName: \"kubernetes.io/projected/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-kube-api-access-hdp5f\") pod \"dnsmasq-dns-666b6646f7-pd96z\" (UID: \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\") " pod="openstack/dnsmasq-dns-666b6646f7-pd96z" Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.728504 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-config\") pod \"dnsmasq-dns-666b6646f7-pd96z\" (UID: \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\") " pod="openstack/dnsmasq-dns-666b6646f7-pd96z" Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.829751 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-config\") pod \"dnsmasq-dns-666b6646f7-pd96z\" (UID: \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\") " pod="openstack/dnsmasq-dns-666b6646f7-pd96z" Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.829857 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-dns-svc\") pod \"dnsmasq-dns-666b6646f7-pd96z\" (UID: \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\") " pod="openstack/dnsmasq-dns-666b6646f7-pd96z" Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.829894 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdp5f\" (UniqueName: \"kubernetes.io/projected/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-kube-api-access-hdp5f\") pod \"dnsmasq-dns-666b6646f7-pd96z\" (UID: \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\") " pod="openstack/dnsmasq-dns-666b6646f7-pd96z" Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.832964 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-dns-svc\") pod \"dnsmasq-dns-666b6646f7-pd96z\" (UID: \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\") " pod="openstack/dnsmasq-dns-666b6646f7-pd96z" Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.836553 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-config\") pod \"dnsmasq-dns-666b6646f7-pd96z\" (UID: \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\") " pod="openstack/dnsmasq-dns-666b6646f7-pd96z" Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.866411 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdp5f\" (UniqueName: 
\"kubernetes.io/projected/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-kube-api-access-hdp5f\") pod \"dnsmasq-dns-666b6646f7-pd96z\" (UID: \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\") " pod="openstack/dnsmasq-dns-666b6646f7-pd96z" Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.928786 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wtj2r"] Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.936423 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-pd96z" Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.951021 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-r8hm9"] Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.952240 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" Oct 08 07:31:49 crc kubenswrapper[4693]: I1008 07:31:49.963281 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-r8hm9"] Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.033798 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf965\" (UniqueName: \"kubernetes.io/projected/4b35ec4a-da73-4069-b14e-341b56e5718f-kube-api-access-wf965\") pod \"dnsmasq-dns-57d769cc4f-r8hm9\" (UID: \"4b35ec4a-da73-4069-b14e-341b56e5718f\") " pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.034060 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b35ec4a-da73-4069-b14e-341b56e5718f-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-r8hm9\" (UID: \"4b35ec4a-da73-4069-b14e-341b56e5718f\") " pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.034115 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b35ec4a-da73-4069-b14e-341b56e5718f-config\") pod \"dnsmasq-dns-57d769cc4f-r8hm9\" (UID: \"4b35ec4a-da73-4069-b14e-341b56e5718f\") " pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.135974 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf965\" (UniqueName: \"kubernetes.io/projected/4b35ec4a-da73-4069-b14e-341b56e5718f-kube-api-access-wf965\") pod \"dnsmasq-dns-57d769cc4f-r8hm9\" (UID: \"4b35ec4a-da73-4069-b14e-341b56e5718f\") " pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.136015 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b35ec4a-da73-4069-b14e-341b56e5718f-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-r8hm9\" (UID: \"4b35ec4a-da73-4069-b14e-341b56e5718f\") " pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.136055 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b35ec4a-da73-4069-b14e-341b56e5718f-config\") pod \"dnsmasq-dns-57d769cc4f-r8hm9\" (UID: \"4b35ec4a-da73-4069-b14e-341b56e5718f\") " pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.136858 4693 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b35ec4a-da73-4069-b14e-341b56e5718f-config\") pod \"dnsmasq-dns-57d769cc4f-r8hm9\" (UID: \"4b35ec4a-da73-4069-b14e-341b56e5718f\") " pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.136896 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b35ec4a-da73-4069-b14e-341b56e5718f-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-r8hm9\" (UID: \"4b35ec4a-da73-4069-b14e-341b56e5718f\") " pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.155941 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf965\" (UniqueName: \"kubernetes.io/projected/4b35ec4a-da73-4069-b14e-341b56e5718f-kube-api-access-wf965\") pod \"dnsmasq-dns-57d769cc4f-r8hm9\" (UID: \"4b35ec4a-da73-4069-b14e-341b56e5718f\") " pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.288359 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.458106 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-pd96z"] Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.790704 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.791889 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.794077 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.794158 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-rrkgx" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.795059 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.795197 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.795833 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.795856 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.797775 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.807111 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.837005 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-r8hm9"] Oct 08 07:31:50 crc kubenswrapper[4693]: W1008 07:31:50.844175 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b35ec4a_da73_4069_b14e_341b56e5718f.slice/crio-2be601f45da6d1679237ce1dd71d5c8fbd0b776be525e086c193cb0649e52d75 WatchSource:0}: Error finding container 
2be601f45da6d1679237ce1dd71d5c8fbd0b776be525e086c193cb0649e52d75: Status 404 returned error can't find the container with id 2be601f45da6d1679237ce1dd71d5c8fbd0b776be525e086c193cb0649e52d75 Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.868041 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.868120 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/344b4125-6848-4985-b722-8e9e589b1ab4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.868148 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.868164 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.868196 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.868338 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldmwx\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-kube-api-access-ldmwx\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.868426 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.868517 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.868542 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-config-data\") pod 
\"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.868563 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/344b4125-6848-4985-b722-8e9e589b1ab4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.868621 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.970922 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-config-data\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.970958 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.970988 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/344b4125-6848-4985-b722-8e9e589b1ab4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.971010 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.971045 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.971079 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/344b4125-6848-4985-b722-8e9e589b1ab4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.971096 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.971113 4693 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.971145 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.971166 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldmwx\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-kube-api-access-ldmwx\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.971188 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.971501 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.971886 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-config-data\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.972089 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.972500 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.972624 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.972739 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " 
pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.978788 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/344b4125-6848-4985-b722-8e9e589b1ab4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.979259 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.988176 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.988448 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldmwx\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-kube-api-access-ldmwx\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:50 crc kubenswrapper[4693]: I1008 07:31:50.990489 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/344b4125-6848-4985-b722-8e9e589b1ab4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.009968 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " pod="openstack/rabbitmq-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.082660 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.083915 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.086968 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.101721 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dk6wq" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.101914 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.102007 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.102149 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.102548 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.102833 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.110433 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.119776 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" event={"ID":"4b35ec4a-da73-4069-b14e-341b56e5718f","Type":"ContainerStarted","Data":"2be601f45da6d1679237ce1dd71d5c8fbd0b776be525e086c193cb0649e52d75"} Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.121237 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-pd96z" event={"ID":"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204","Type":"ContainerStarted","Data":"e479e873aa5d2d5d68361a17920f0696aebe2f8e37c9e6e8191ca76bc5b5d9e1"} Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.143005 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.173708 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.174186 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.174216 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.174238 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc3c1ad2-7355-4db4-af71-27c3454a025c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.174262 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.174286 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgk8m\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-kube-api-access-jgk8m\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.174310 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc3c1ad2-7355-4db4-af71-27c3454a025c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.174471 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.174741 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.174787 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.174876 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.279496 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.279535 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.279560 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.279590 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.279623 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.279648 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.279664 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc3c1ad2-7355-4db4-af71-27c3454a025c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 
07:31:51.279689 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.280070 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.280110 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.280114 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.280156 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgk8m\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-kube-api-access-jgk8m\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.280235 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc3c1ad2-7355-4db4-af71-27c3454a025c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.280285 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.281024 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.282389 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.282414 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.291701 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc3c1ad2-7355-4db4-af71-27c3454a025c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.293280 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc3c1ad2-7355-4db4-af71-27c3454a025c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.298561 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgk8m\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-kube-api-access-jgk8m\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.299473 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.302184 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.304958 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.471978 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.672498 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 08 07:31:51 crc kubenswrapper[4693]: I1008 07:31:51.766702 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.130168 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"344b4125-6848-4985-b722-8e9e589b1ab4","Type":"ContainerStarted","Data":"13469022dc950a145e4d8c3dc49a996a13fc12936fa60f1e6de9e9e27bca0f32"} Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.132435 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cc3c1ad2-7355-4db4-af71-27c3454a025c","Type":"ContainerStarted","Data":"62bbeea0cfe67b31b756797f380232d1cc991b442a5b9a18190cff44b372edce"} Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.814872 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.818434 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.821443 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.821938 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.821958 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-j6rqm" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.822080 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.822496 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.830029 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.832945 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.905674 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg5lg\" (UniqueName: \"kubernetes.io/projected/1aa3187a-5fce-4486-a846-709a6231383f-kube-api-access-vg5lg\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.905728 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1aa3187a-5fce-4486-a846-709a6231383f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.905765 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: 
\"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.905794 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1aa3187a-5fce-4486-a846-709a6231383f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.905836 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1aa3187a-5fce-4486-a846-709a6231383f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.905970 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/1aa3187a-5fce-4486-a846-709a6231383f-secrets\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.906070 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1aa3187a-5fce-4486-a846-709a6231383f-kolla-config\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.906311 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1aa3187a-5fce-4486-a846-709a6231383f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:52 crc kubenswrapper[4693]: I1008 07:31:52.906355 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1aa3187a-5fce-4486-a846-709a6231383f-config-data-default\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.007651 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1aa3187a-5fce-4486-a846-709a6231383f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.007714 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1aa3187a-5fce-4486-a846-709a6231383f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.007747 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/1aa3187a-5fce-4486-a846-709a6231383f-secrets\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc 
kubenswrapper[4693]: I1008 07:31:53.007793 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1aa3187a-5fce-4486-a846-709a6231383f-kolla-config\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.007825 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1aa3187a-5fce-4486-a846-709a6231383f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.007847 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1aa3187a-5fce-4486-a846-709a6231383f-config-data-default\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.007873 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg5lg\" (UniqueName: \"kubernetes.io/projected/1aa3187a-5fce-4486-a846-709a6231383f-kube-api-access-vg5lg\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.007896 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1aa3187a-5fce-4486-a846-709a6231383f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.008376 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.008415 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1aa3187a-5fce-4486-a846-709a6231383f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.008682 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.008965 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1aa3187a-5fce-4486-a846-709a6231383f-config-data-default\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.009595 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/1aa3187a-5fce-4486-a846-709a6231383f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.013731 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1aa3187a-5fce-4486-a846-709a6231383f-kolla-config\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.021191 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1aa3187a-5fce-4486-a846-709a6231383f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.021563 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1aa3187a-5fce-4486-a846-709a6231383f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.023746 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/1aa3187a-5fce-4486-a846-709a6231383f-secrets\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.025142 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg5lg\" (UniqueName: \"kubernetes.io/projected/1aa3187a-5fce-4486-a846-709a6231383f-kube-api-access-vg5lg\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.043575 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"1aa3187a-5fce-4486-a846-709a6231383f\") " pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.144085 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.489718 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.489778 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.833226 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.834975 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.837135 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.837394 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.838762 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-xf2pj" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.838921 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.840855 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.921030 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4cf2123-362f-4d9f-8080-bd9d6e13de17-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.921082 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c4cf2123-362f-4d9f-8080-bd9d6e13de17-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.921148 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4cf2123-362f-4d9f-8080-bd9d6e13de17-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.921180 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c4cf2123-362f-4d9f-8080-bd9d6e13de17-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.921231 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h287d\" (UniqueName: \"kubernetes.io/projected/c4cf2123-362f-4d9f-8080-bd9d6e13de17-kube-api-access-h287d\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.921248 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4cf2123-362f-4d9f-8080-bd9d6e13de17-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.921338 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/c4cf2123-362f-4d9f-8080-bd9d6e13de17-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.921542 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:53 crc kubenswrapper[4693]: I1008 07:31:53.921697 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4cf2123-362f-4d9f-8080-bd9d6e13de17-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.023259 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4cf2123-362f-4d9f-8080-bd9d6e13de17-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.023304 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4cf2123-362f-4d9f-8080-bd9d6e13de17-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.023326 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c4cf2123-362f-4d9f-8080-bd9d6e13de17-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.023399 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4cf2123-362f-4d9f-8080-bd9d6e13de17-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.023430 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c4cf2123-362f-4d9f-8080-bd9d6e13de17-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.023486 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4cf2123-362f-4d9f-8080-bd9d6e13de17-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.023501 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h287d\" (UniqueName: \"kubernetes.io/projected/c4cf2123-362f-4d9f-8080-bd9d6e13de17-kube-api-access-h287d\") pod 
\"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.023541 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c4cf2123-362f-4d9f-8080-bd9d6e13de17-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.023573 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.023708 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.025749 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c4cf2123-362f-4d9f-8080-bd9d6e13de17-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.029563 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c4cf2123-362f-4d9f-8080-bd9d6e13de17-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.029643 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4cf2123-362f-4d9f-8080-bd9d6e13de17-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.036916 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4cf2123-362f-4d9f-8080-bd9d6e13de17-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.040771 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4cf2123-362f-4d9f-8080-bd9d6e13de17-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.040842 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4cf2123-362f-4d9f-8080-bd9d6e13de17-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc 
kubenswrapper[4693]: I1008 07:31:54.044185 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h287d\" (UniqueName: \"kubernetes.io/projected/c4cf2123-362f-4d9f-8080-bd9d6e13de17-kube-api-access-h287d\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.050668 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c4cf2123-362f-4d9f-8080-bd9d6e13de17-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.067248 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c4cf2123-362f-4d9f-8080-bd9d6e13de17\") " pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.187759 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.197634 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.198660 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.200742 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-hf7fr" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.201153 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.201286 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.203666 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.226712 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-config-data\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.226794 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-memcached-tls-certs\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.227107 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-kolla-config\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.227161 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-combined-ca-bundle\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.227180 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fc8v\" (UniqueName: \"kubernetes.io/projected/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-kube-api-access-8fc8v\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.328926 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-kolla-config\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.329000 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-combined-ca-bundle\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.329021 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fc8v\" (UniqueName: \"kubernetes.io/projected/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-kube-api-access-8fc8v\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.329061 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-config-data\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.329180 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-memcached-tls-certs\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.330031 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-kolla-config\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.331011 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-config-data\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.335575 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-memcached-tls-certs\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.335869 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-combined-ca-bundle\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.344424 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fc8v\" (UniqueName: \"kubernetes.io/projected/225a20e0-eec7-4b8c-89e1-b4a2ebb513a3-kube-api-access-8fc8v\") pod \"memcached-0\" (UID: \"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3\") " pod="openstack/memcached-0" Oct 08 07:31:54 crc kubenswrapper[4693]: I1008 07:31:54.537291 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 08 07:31:55 crc kubenswrapper[4693]: I1008 07:31:55.945226 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 08 07:31:55 crc kubenswrapper[4693]: I1008 07:31:55.946626 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 08 07:31:55 crc kubenswrapper[4693]: I1008 07:31:55.950033 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-9z87s" Oct 08 07:31:55 crc kubenswrapper[4693]: I1008 07:31:55.967281 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 08 07:31:56 crc kubenswrapper[4693]: I1008 07:31:56.063400 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xggg\" (UniqueName: \"kubernetes.io/projected/8a58b116-77cf-41d3-87cd-99880a4db87f-kube-api-access-8xggg\") pod \"kube-state-metrics-0\" (UID: \"8a58b116-77cf-41d3-87cd-99880a4db87f\") " pod="openstack/kube-state-metrics-0" Oct 08 07:31:56 crc kubenswrapper[4693]: I1008 07:31:56.164148 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xggg\" (UniqueName: \"kubernetes.io/projected/8a58b116-77cf-41d3-87cd-99880a4db87f-kube-api-access-8xggg\") pod \"kube-state-metrics-0\" (UID: \"8a58b116-77cf-41d3-87cd-99880a4db87f\") " pod="openstack/kube-state-metrics-0" Oct 08 07:31:56 crc kubenswrapper[4693]: I1008 07:31:56.181941 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xggg\" (UniqueName: \"kubernetes.io/projected/8a58b116-77cf-41d3-87cd-99880a4db87f-kube-api-access-8xggg\") pod \"kube-state-metrics-0\" (UID: \"8a58b116-77cf-41d3-87cd-99880a4db87f\") " pod="openstack/kube-state-metrics-0" Oct 08 07:31:56 crc kubenswrapper[4693]: I1008 07:31:56.263710 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.681994 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-qmltj"] Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.683398 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.685795 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.687784 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.688041 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-b8mgc" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.692770 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qmltj"] Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.698954 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-5mdkq"] Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.700936 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.717994 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/391f1e55-ea6a-4d2f-ae2a-08adfad94698-scripts\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.718061 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-combined-ca-bundle\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.718091 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/391f1e55-ea6a-4d2f-ae2a-08adfad94698-var-run\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.718139 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-scripts\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.718198 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-var-run-ovn\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.718242 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7vs4\" (UniqueName: \"kubernetes.io/projected/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-kube-api-access-t7vs4\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.718268 4693 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/391f1e55-ea6a-4d2f-ae2a-08adfad94698-var-lib\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.718303 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/391f1e55-ea6a-4d2f-ae2a-08adfad94698-etc-ovs\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.718341 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-ovn-controller-tls-certs\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.718371 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slgc6\" (UniqueName: \"kubernetes.io/projected/391f1e55-ea6a-4d2f-ae2a-08adfad94698-kube-api-access-slgc6\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.718398 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-var-log-ovn\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.718452 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-var-run\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.718476 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/391f1e55-ea6a-4d2f-ae2a-08adfad94698-var-log\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.723691 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5mdkq"] Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819320 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slgc6\" (UniqueName: \"kubernetes.io/projected/391f1e55-ea6a-4d2f-ae2a-08adfad94698-kube-api-access-slgc6\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819368 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-var-log-ovn\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " 
pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819414 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-var-run\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819433 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/391f1e55-ea6a-4d2f-ae2a-08adfad94698-var-log\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819461 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/391f1e55-ea6a-4d2f-ae2a-08adfad94698-scripts\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819486 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-combined-ca-bundle\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819505 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/391f1e55-ea6a-4d2f-ae2a-08adfad94698-var-run\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819535 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-scripts\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819575 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-var-run-ovn\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819610 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7vs4\" (UniqueName: \"kubernetes.io/projected/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-kube-api-access-t7vs4\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819634 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/391f1e55-ea6a-4d2f-ae2a-08adfad94698-var-lib\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819656 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: 
\"kubernetes.io/host-path/391f1e55-ea6a-4d2f-ae2a-08adfad94698-etc-ovs\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.819686 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-ovn-controller-tls-certs\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.820321 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/391f1e55-ea6a-4d2f-ae2a-08adfad94698-var-log\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.820732 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/391f1e55-ea6a-4d2f-ae2a-08adfad94698-var-run\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.820734 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-var-log-ovn\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.820826 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-var-run-ovn\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.820850 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-var-run\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.821041 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/391f1e55-ea6a-4d2f-ae2a-08adfad94698-etc-ovs\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.821195 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/391f1e55-ea6a-4d2f-ae2a-08adfad94698-var-lib\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.822528 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/391f1e55-ea6a-4d2f-ae2a-08adfad94698-scripts\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.825731 4693 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-scripts\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.835524 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-ovn-controller-tls-certs\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.835787 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-combined-ca-bundle\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.840725 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slgc6\" (UniqueName: \"kubernetes.io/projected/391f1e55-ea6a-4d2f-ae2a-08adfad94698-kube-api-access-slgc6\") pod \"ovn-controller-ovs-5mdkq\" (UID: \"391f1e55-ea6a-4d2f-ae2a-08adfad94698\") " pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:31:59 crc kubenswrapper[4693]: I1008 07:31:59.843344 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7vs4\" (UniqueName: \"kubernetes.io/projected/5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d-kube-api-access-t7vs4\") pod \"ovn-controller-qmltj\" (UID: \"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d\") " pod="openstack/ovn-controller-qmltj" Oct 08 07:32:00 crc kubenswrapper[4693]: I1008 07:32:00.016416 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qmltj" Oct 08 07:32:00 crc kubenswrapper[4693]: I1008 07:32:00.038998 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.459000 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.461182 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.463734 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.464237 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-8lz7x" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.464411 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.464527 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.466244 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.470408 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.652267 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.652335 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfqnf\" (UniqueName: \"kubernetes.io/projected/02c7bbbf-7730-4f24-b131-92411b14dcb0-kube-api-access-vfqnf\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.652372 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/02c7bbbf-7730-4f24-b131-92411b14dcb0-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.652457 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02c7bbbf-7730-4f24-b131-92411b14dcb0-config\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.652499 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/02c7bbbf-7730-4f24-b131-92411b14dcb0-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.652560 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/02c7bbbf-7730-4f24-b131-92411b14dcb0-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.652588 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/02c7bbbf-7730-4f24-b131-92411b14dcb0-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.652658 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c7bbbf-7730-4f24-b131-92411b14dcb0-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.754266 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/02c7bbbf-7730-4f24-b131-92411b14dcb0-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.754418 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02c7bbbf-7730-4f24-b131-92411b14dcb0-config\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.754449 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/02c7bbbf-7730-4f24-b131-92411b14dcb0-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.754513 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/02c7bbbf-7730-4f24-b131-92411b14dcb0-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.754539 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/02c7bbbf-7730-4f24-b131-92411b14dcb0-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.754789 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c7bbbf-7730-4f24-b131-92411b14dcb0-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.754891 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.755038 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfqnf\" (UniqueName: \"kubernetes.io/projected/02c7bbbf-7730-4f24-b131-92411b14dcb0-kube-api-access-vfqnf\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 
07:32:01.755800 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.756062 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/02c7bbbf-7730-4f24-b131-92411b14dcb0-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.756562 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/02c7bbbf-7730-4f24-b131-92411b14dcb0-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.756606 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02c7bbbf-7730-4f24-b131-92411b14dcb0-config\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.761291 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/02c7bbbf-7730-4f24-b131-92411b14dcb0-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.774016 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfqnf\" (UniqueName: \"kubernetes.io/projected/02c7bbbf-7730-4f24-b131-92411b14dcb0-kube-api-access-vfqnf\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.774016 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c7bbbf-7730-4f24-b131-92411b14dcb0-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.777463 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/02c7bbbf-7730-4f24-b131-92411b14dcb0-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.789942 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-nb-0\" (UID: \"02c7bbbf-7730-4f24-b131-92411b14dcb0\") " pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:01 crc kubenswrapper[4693]: I1008 07:32:01.803309 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 08 07:32:02 crc kubenswrapper[4693]: I1008 07:32:02.924597 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 08 07:32:02 crc kubenswrapper[4693]: I1008 07:32:02.933192 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 08 07:32:02 crc kubenswrapper[4693]: I1008 07:32:02.933306 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:02 crc kubenswrapper[4693]: I1008 07:32:02.936398 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 08 07:32:02 crc kubenswrapper[4693]: I1008 07:32:02.936778 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 08 07:32:02 crc kubenswrapper[4693]: I1008 07:32:02.936994 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 08 07:32:02 crc kubenswrapper[4693]: I1008 07:32:02.937971 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-49x56" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.113569 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d809602-04d2-4d1f-b024-30fecd9b2256-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.113931 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d809602-04d2-4d1f-b024-30fecd9b2256-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.114009 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d809602-04d2-4d1f-b024-30fecd9b2256-config\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.114078 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d809602-04d2-4d1f-b024-30fecd9b2256-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.114118 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5d809602-04d2-4d1f-b024-30fecd9b2256-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.114146 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 
07:32:03.114281 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c5l4\" (UniqueName: \"kubernetes.io/projected/5d809602-04d2-4d1f-b024-30fecd9b2256-kube-api-access-2c5l4\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.114340 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d809602-04d2-4d1f-b024-30fecd9b2256-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.215823 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d809602-04d2-4d1f-b024-30fecd9b2256-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.215861 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d809602-04d2-4d1f-b024-30fecd9b2256-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.215924 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d809602-04d2-4d1f-b024-30fecd9b2256-config\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.215943 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d809602-04d2-4d1f-b024-30fecd9b2256-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.215958 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5d809602-04d2-4d1f-b024-30fecd9b2256-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.215976 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.215999 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c5l4\" (UniqueName: \"kubernetes.io/projected/5d809602-04d2-4d1f-b024-30fecd9b2256-kube-api-access-2c5l4\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.216015 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d809602-04d2-4d1f-b024-30fecd9b2256-metrics-certs-tls-certs\") 
pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.217125 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.217565 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d809602-04d2-4d1f-b024-30fecd9b2256-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.218278 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d809602-04d2-4d1f-b024-30fecd9b2256-config\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.219298 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5d809602-04d2-4d1f-b024-30fecd9b2256-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.219573 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d809602-04d2-4d1f-b024-30fecd9b2256-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.220215 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d809602-04d2-4d1f-b024-30fecd9b2256-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.222419 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d809602-04d2-4d1f-b024-30fecd9b2256-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.236471 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c5l4\" (UniqueName: \"kubernetes.io/projected/5d809602-04d2-4d1f-b024-30fecd9b2256-kube-api-access-2c5l4\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.241435 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5d809602-04d2-4d1f-b024-30fecd9b2256\") " pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:03 crc kubenswrapper[4693]: I1008 07:32:03.257700 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.132090 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.132561 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zm9h2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-m4dbh_openstack(34179f0f-280a-4da3-a6d1-7b924b798ab2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.133880 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh" podUID="34179f0f-280a-4da3-a6d1-7b924b798ab2" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.177541 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.177778 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d 
--hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wf965,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-r8hm9_openstack(4b35ec4a-da73-4069-b14e-341b56e5718f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.179000 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" podUID="4b35ec4a-da73-4069-b14e-341b56e5718f" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.200216 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.200382 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hdp5f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-pd96z_openstack(e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.201674 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-pd96z" podUID="e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.240234 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.240462 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z4gtp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-wtj2r_openstack(22f66159-7f80-41e9-8772-0c861d302ebc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.241744 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" podUID="22f66159-7f80-41e9-8772-0c861d302ebc" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.418714 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" podUID="4b35ec4a-da73-4069-b14e-341b56e5718f" Oct 08 07:32:06 crc kubenswrapper[4693]: E1008 07:32:06.442600 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-pd96z" podUID="e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204" Oct 08 07:32:06 crc kubenswrapper[4693]: I1008 07:32:06.939379 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh" Oct 08 07:32:06 crc kubenswrapper[4693]: I1008 07:32:06.980324 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 08 07:32:06 crc kubenswrapper[4693]: W1008 07:32:06.990562 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1aa3187a_5fce_4486_a846_709a6231383f.slice/crio-b7d415e31eced22adc2d62ea972ba09639aa59dadc105a833db1128238356081 WatchSource:0}: Error finding container b7d415e31eced22adc2d62ea972ba09639aa59dadc105a833db1128238356081: Status 404 returned error can't find the container with id b7d415e31eced22adc2d62ea972ba09639aa59dadc105a833db1128238356081 Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.004929 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.005582 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 08 07:32:07 crc kubenswrapper[4693]: W1008 07:32:07.013120 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod225a20e0_eec7_4b8c_89e1_b4a2ebb513a3.slice/crio-4f011da180ccea7bb17e25c0a9e6c2d2a1eceeab87545fd8ac4fc489f8727453 WatchSource:0}: Error finding container 4f011da180ccea7bb17e25c0a9e6c2d2a1eceeab87545fd8ac4fc489f8727453: Status 404 returned error can't find the container with id 4f011da180ccea7bb17e25c0a9e6c2d2a1eceeab87545fd8ac4fc489f8727453 Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.020071 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34179f0f-280a-4da3-a6d1-7b924b798ab2-config\") pod \"34179f0f-280a-4da3-a6d1-7b924b798ab2\" (UID: \"34179f0f-280a-4da3-a6d1-7b924b798ab2\") " Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.020132 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm9h2\" (UniqueName: \"kubernetes.io/projected/34179f0f-280a-4da3-a6d1-7b924b798ab2-kube-api-access-zm9h2\") pod \"34179f0f-280a-4da3-a6d1-7b924b798ab2\" (UID: \"34179f0f-280a-4da3-a6d1-7b924b798ab2\") " Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.020156 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22f66159-7f80-41e9-8772-0c861d302ebc-config\") pod \"22f66159-7f80-41e9-8772-0c861d302ebc\" (UID: \"22f66159-7f80-41e9-8772-0c861d302ebc\") " Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.020174 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/22f66159-7f80-41e9-8772-0c861d302ebc-dns-svc\") pod \"22f66159-7f80-41e9-8772-0c861d302ebc\" (UID: \"22f66159-7f80-41e9-8772-0c861d302ebc\") " Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.020210 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4gtp\" (UniqueName: \"kubernetes.io/projected/22f66159-7f80-41e9-8772-0c861d302ebc-kube-api-access-z4gtp\") pod \"22f66159-7f80-41e9-8772-0c861d302ebc\" (UID: \"22f66159-7f80-41e9-8772-0c861d302ebc\") " Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.022252 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/22f66159-7f80-41e9-8772-0c861d302ebc-config" (OuterVolumeSpecName: "config") pod "22f66159-7f80-41e9-8772-0c861d302ebc" (UID: "22f66159-7f80-41e9-8772-0c861d302ebc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.022561 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22f66159-7f80-41e9-8772-0c861d302ebc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "22f66159-7f80-41e9-8772-0c861d302ebc" (UID: "22f66159-7f80-41e9-8772-0c861d302ebc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.022611 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34179f0f-280a-4da3-a6d1-7b924b798ab2-config" (OuterVolumeSpecName: "config") pod "34179f0f-280a-4da3-a6d1-7b924b798ab2" (UID: "34179f0f-280a-4da3-a6d1-7b924b798ab2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.029070 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34179f0f-280a-4da3-a6d1-7b924b798ab2-kube-api-access-zm9h2" (OuterVolumeSpecName: "kube-api-access-zm9h2") pod "34179f0f-280a-4da3-a6d1-7b924b798ab2" (UID: "34179f0f-280a-4da3-a6d1-7b924b798ab2"). InnerVolumeSpecName "kube-api-access-zm9h2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.029623 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22f66159-7f80-41e9-8772-0c861d302ebc-kube-api-access-z4gtp" (OuterVolumeSpecName: "kube-api-access-z4gtp") pod "22f66159-7f80-41e9-8772-0c861d302ebc" (UID: "22f66159-7f80-41e9-8772-0c861d302ebc"). InnerVolumeSpecName "kube-api-access-z4gtp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.116991 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qmltj"] Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.121549 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.123235 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4gtp\" (UniqueName: \"kubernetes.io/projected/22f66159-7f80-41e9-8772-0c861d302ebc-kube-api-access-z4gtp\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.123264 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34179f0f-280a-4da3-a6d1-7b924b798ab2-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.123281 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm9h2\" (UniqueName: \"kubernetes.io/projected/34179f0f-280a-4da3-a6d1-7b924b798ab2-kube-api-access-zm9h2\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.123294 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22f66159-7f80-41e9-8772-0c861d302ebc-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.123306 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/22f66159-7f80-41e9-8772-0c861d302ebc-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.140614 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.226208 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 08 07:32:07 crc kubenswrapper[4693]: W1008 07:32:07.244150 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a58b116_77cf_41d3_87cd_99880a4db87f.slice/crio-8f55365903111de135f4d8504db9da6a9482b5a13b7e01fb33783d0d8f12d5d2 WatchSource:0}: Error finding container 8f55365903111de135f4d8504db9da6a9482b5a13b7e01fb33783d0d8f12d5d2: Status 404 returned error can't find the container with id 8f55365903111de135f4d8504db9da6a9482b5a13b7e01fb33783d0d8f12d5d2 Oct 08 07:32:07 crc kubenswrapper[4693]: W1008 07:32:07.253523 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02c7bbbf_7730_4f24_b131_92411b14dcb0.slice/crio-363304809c234cc1a6686cdc0a40f2815f8adf44dec5035849b264aa4cea3a06 WatchSource:0}: Error finding container 363304809c234cc1a6686cdc0a40f2815f8adf44dec5035849b264aa4cea3a06: Status 404 returned error can't find the container with id 363304809c234cc1a6686cdc0a40f2815f8adf44dec5035849b264aa4cea3a06 Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.318167 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5mdkq"] Oct 08 07:32:07 crc kubenswrapper[4693]: W1008 07:32:07.320089 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod391f1e55_ea6a_4d2f_ae2a_08adfad94698.slice/crio-7e40d7d19cc043603b66435e631b1c0f33ecc14d73d88e81aed08f157896fa43 
WatchSource:0}: Error finding container 7e40d7d19cc043603b66435e631b1c0f33ecc14d73d88e81aed08f157896fa43: Status 404 returned error can't find the container with id 7e40d7d19cc043603b66435e631b1c0f33ecc14d73d88e81aed08f157896fa43
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.422526 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qmltj" event={"ID":"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d","Type":"ContainerStarted","Data":"6dead2a0679826adf5050fc839bc5ce0268f91cb94daff3dfa56db6f41f2fbec"}
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.423688 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3","Type":"ContainerStarted","Data":"4f011da180ccea7bb17e25c0a9e6c2d2a1eceeab87545fd8ac4fc489f8727453"}
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.424577 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r"
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.424584 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-wtj2r" event={"ID":"22f66159-7f80-41e9-8772-0c861d302ebc","Type":"ContainerDied","Data":"b69719aa360b2bce55aad2c4932bb04e0ed1c34699c8cf9a617e7968bf6a4209"}
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.425513 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c4cf2123-362f-4d9f-8080-bd9d6e13de17","Type":"ContainerStarted","Data":"b5a7ebb7992327d1da4914e93c6f3c245d560c29e1dd737756034cf94a325916"}
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.427292 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"1aa3187a-5fce-4486-a846-709a6231383f","Type":"ContainerStarted","Data":"b7d415e31eced22adc2d62ea972ba09639aa59dadc105a833db1128238356081"}
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.428971 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh" event={"ID":"34179f0f-280a-4da3-a6d1-7b924b798ab2","Type":"ContainerDied","Data":"31638e4f7ee80ff0f01563f64ee6ea28194739c2675149f520b4674bf917c8da"}
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.429052 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-m4dbh"
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.431269 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"02c7bbbf-7730-4f24-b131-92411b14dcb0","Type":"ContainerStarted","Data":"363304809c234cc1a6686cdc0a40f2815f8adf44dec5035849b264aa4cea3a06"}
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.432652 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5mdkq" event={"ID":"391f1e55-ea6a-4d2f-ae2a-08adfad94698","Type":"ContainerStarted","Data":"7e40d7d19cc043603b66435e631b1c0f33ecc14d73d88e81aed08f157896fa43"}
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.434475 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8a58b116-77cf-41d3-87cd-99880a4db87f","Type":"ContainerStarted","Data":"8f55365903111de135f4d8504db9da6a9482b5a13b7e01fb33783d0d8f12d5d2"}
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.467782 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-m4dbh"]
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.473991 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-m4dbh"]
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.502708 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wtj2r"]
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.508349 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wtj2r"]
Oct 08 07:32:07 crc kubenswrapper[4693]: I1008 07:32:07.802760 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Oct 08 07:32:08 crc kubenswrapper[4693]: I1008 07:32:08.445032 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5d809602-04d2-4d1f-b024-30fecd9b2256","Type":"ContainerStarted","Data":"14acf9183cf79f8d3c7027783b394536d5041051a38a5bc9ec13e03e9631f25b"}
Oct 08 07:32:09 crc kubenswrapper[4693]: I1008 07:32:09.374029 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22f66159-7f80-41e9-8772-0c861d302ebc" path="/var/lib/kubelet/pods/22f66159-7f80-41e9-8772-0c861d302ebc/volumes"
Oct 08 07:32:09 crc kubenswrapper[4693]: I1008 07:32:09.374443 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34179f0f-280a-4da3-a6d1-7b924b798ab2" path="/var/lib/kubelet/pods/34179f0f-280a-4da3-a6d1-7b924b798ab2/volumes"
Oct 08 07:32:09 crc kubenswrapper[4693]: I1008 07:32:09.453488 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cc3c1ad2-7355-4db4-af71-27c3454a025c","Type":"ContainerStarted","Data":"1bba1bea243da621ea1b6179ad5db08abecc8d2aa48bc3f641980d1431dcd318"}
Oct 08 07:32:09 crc kubenswrapper[4693]: I1008 07:32:09.455859 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"344b4125-6848-4985-b722-8e9e589b1ab4","Type":"ContainerStarted","Data":"59b5f5fb3f59acdbcb78be6d525d4a9569344861cb9fbcb63563f2eda385b2f5"}
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.534491 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qmltj" event={"ID":"5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d","Type":"ContainerStarted","Data":"4603af4f127d8d3c6a1b15d44fce347c24d971c43b14c1a55864f77e4c705ed9"}
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.535096 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-qmltj"
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.541533 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5d809602-04d2-4d1f-b024-30fecd9b2256","Type":"ContainerStarted","Data":"686454c218f3696ebbe7752ec1ce8ca1350372ddaa463ca909ce17b5eddf28f3"}
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.543756 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"225a20e0-eec7-4b8c-89e1-b4a2ebb513a3","Type":"ContainerStarted","Data":"ac57e4c2764da74004cce884625b4d52f4ba527e2be4ae4898850d51cb778380"}
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.544030 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0"
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.545881 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"02c7bbbf-7730-4f24-b131-92411b14dcb0","Type":"ContainerStarted","Data":"7159ef252904473b25f872f0b6a2624477269e1040b5f78f3eb845436d3912d4"}
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.548875 4693 generic.go:334] "Generic (PLEG): container finished" podID="391f1e55-ea6a-4d2f-ae2a-08adfad94698" containerID="534e4a6ac5b4f1027e0791e945a5c17227c4651f65187c4afbcf104f02325081" exitCode=0
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.548937 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5mdkq" event={"ID":"391f1e55-ea6a-4d2f-ae2a-08adfad94698","Type":"ContainerDied","Data":"534e4a6ac5b4f1027e0791e945a5c17227c4651f65187c4afbcf104f02325081"}
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.555244 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8a58b116-77cf-41d3-87cd-99880a4db87f","Type":"ContainerStarted","Data":"8cd9894db58134c0789884d1ed536614ad507f30ab2bfb0efca2e2de0630124d"}
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.555777 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.560005 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-qmltj" podStartSLOduration=10.521195929 podStartE2EDuration="17.559988345s" podCreationTimestamp="2025-10-08 07:31:59 +0000 UTC" firstStartedPulling="2025-10-08 07:32:07.155268459 +0000 UTC m=+912.526233394" lastFinishedPulling="2025-10-08 07:32:14.194060845 +0000 UTC m=+919.565025810" observedRunningTime="2025-10-08 07:32:16.549961491 +0000 UTC m=+921.920926416" watchObservedRunningTime="2025-10-08 07:32:16.559988345 +0000 UTC m=+921.930953270"
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.560898 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c4cf2123-362f-4d9f-8080-bd9d6e13de17","Type":"ContainerStarted","Data":"fc1abf09452510894ab59143ee519d5c32299e07475752479109f701aa3b6c2d"}
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.563294 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"1aa3187a-5fce-4486-a846-709a6231383f","Type":"ContainerStarted","Data":"4abcfae67c789f6f4e8e475ee1beb3889aa03576176ef5188091b0107d13c611"}
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.576690 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=15.39799247 podStartE2EDuration="22.576667875s" podCreationTimestamp="2025-10-08 07:31:54 +0000 UTC" firstStartedPulling="2025-10-08 07:32:07.017407993 +0000 UTC m=+912.388372948" lastFinishedPulling="2025-10-08 07:32:14.196083418 +0000 UTC m=+919.567048353" observedRunningTime="2025-10-08 07:32:16.56926424 +0000 UTC m=+921.940229175" watchObservedRunningTime="2025-10-08 07:32:16.576667875 +0000 UTC m=+921.947632810"
Oct 08 07:32:16 crc kubenswrapper[4693]: I1008 07:32:16.608276 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=13.613462868 podStartE2EDuration="21.608262679s" podCreationTimestamp="2025-10-08 07:31:55 +0000 UTC" firstStartedPulling="2025-10-08 07:32:07.245952961 +0000 UTC m=+912.616917896" lastFinishedPulling="2025-10-08 07:32:15.240752762 +0000 UTC m=+920.611717707" observedRunningTime="2025-10-08 07:32:16.603316608 +0000 UTC m=+921.974281543" watchObservedRunningTime="2025-10-08 07:32:16.608262679 +0000 UTC m=+921.979227614"
Oct 08 07:32:17 crc kubenswrapper[4693]: I1008 07:32:17.571141 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5mdkq" event={"ID":"391f1e55-ea6a-4d2f-ae2a-08adfad94698","Type":"ContainerStarted","Data":"8fb5742fc5eb368cdb99111aacbc03f9e3604084a0443dbbbcbd675327c4bf1f"}
Oct 08 07:32:17 crc kubenswrapper[4693]: I1008 07:32:17.571542 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5mdkq" event={"ID":"391f1e55-ea6a-4d2f-ae2a-08adfad94698","Type":"ContainerStarted","Data":"c8369aa5db936ce8acd8137d2a5aefd5fd4767ec0a9784751d87e10a982d434e"}
Oct 08 07:32:17 crc kubenswrapper[4693]: I1008 07:32:17.605182 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-5mdkq" podStartSLOduration=11.64804493 podStartE2EDuration="18.605166212s" podCreationTimestamp="2025-10-08 07:31:59 +0000 UTC" firstStartedPulling="2025-10-08 07:32:07.322254303 +0000 UTC m=+912.693219238" lastFinishedPulling="2025-10-08 07:32:14.279375585 +0000 UTC m=+919.650340520" observedRunningTime="2025-10-08 07:32:17.600330504 +0000 UTC m=+922.971295469" watchObservedRunningTime="2025-10-08 07:32:17.605166212 +0000 UTC m=+922.976131147"
Oct 08 07:32:18 crc kubenswrapper[4693]: I1008 07:32:18.581339 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5mdkq"
Oct 08 07:32:18 crc kubenswrapper[4693]: I1008 07:32:18.581419 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5mdkq"
Oct 08 07:32:18 crc kubenswrapper[4693]: E1008 07:32:18.881352 4693 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4cf2123_362f_4d9f_8080_bd9d6e13de17.slice/crio-conmon-fc1abf09452510894ab59143ee519d5c32299e07475752479109f701aa3b6c2d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4cf2123_362f_4d9f_8080_bd9d6e13de17.slice/crio-fc1abf09452510894ab59143ee519d5c32299e07475752479109f701aa3b6c2d.scope\": RecentStats: unable to find data in memory cache]"
Oct 08 07:32:19 crc kubenswrapper[4693]: I1008 07:32:19.593618 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5d809602-04d2-4d1f-b024-30fecd9b2256","Type":"ContainerStarted","Data":"e99d02c538ee41e4a6ce75497b55dcd1f24f00a9e59fa31b3f4b1babaed0f781"}
Oct 08 07:32:19 crc kubenswrapper[4693]: I1008 07:32:19.598384 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"02c7bbbf-7730-4f24-b131-92411b14dcb0","Type":"ContainerStarted","Data":"56a3f16e4441ac92e96ab4070a068bc392ee967dfa1fbcd190d3a05b29c202dc"}
Oct 08 07:32:19 crc kubenswrapper[4693]: I1008 07:32:19.600676 4693 generic.go:334] "Generic (PLEG): container finished" podID="c4cf2123-362f-4d9f-8080-bd9d6e13de17" containerID="fc1abf09452510894ab59143ee519d5c32299e07475752479109f701aa3b6c2d" exitCode=0
Oct 08 07:32:19 crc kubenswrapper[4693]: I1008 07:32:19.600755 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c4cf2123-362f-4d9f-8080-bd9d6e13de17","Type":"ContainerDied","Data":"fc1abf09452510894ab59143ee519d5c32299e07475752479109f701aa3b6c2d"}
Oct 08 07:32:19 crc kubenswrapper[4693]: I1008 07:32:19.602845 4693 generic.go:334] "Generic (PLEG): container finished" podID="1aa3187a-5fce-4486-a846-709a6231383f" containerID="4abcfae67c789f6f4e8e475ee1beb3889aa03576176ef5188091b0107d13c611" exitCode=0
Oct 08 07:32:19 crc kubenswrapper[4693]: I1008 07:32:19.602883 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"1aa3187a-5fce-4486-a846-709a6231383f","Type":"ContainerDied","Data":"4abcfae67c789f6f4e8e475ee1beb3889aa03576176ef5188091b0107d13c611"}
Oct 08 07:32:19 crc kubenswrapper[4693]: I1008 07:32:19.672428 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=7.303602337 podStartE2EDuration="18.672401115s" podCreationTimestamp="2025-10-08 07:32:01 +0000 UTC" firstStartedPulling="2025-10-08 07:32:07.817706331 +0000 UTC m=+913.188671296" lastFinishedPulling="2025-10-08 07:32:19.186505139 +0000 UTC m=+924.557470074" observedRunningTime="2025-10-08 07:32:19.630828788 +0000 UTC m=+925.001793733" watchObservedRunningTime="2025-10-08 07:32:19.672401115 +0000 UTC m=+925.043366090"
Oct 08 07:32:19 crc kubenswrapper[4693]: I1008 07:32:19.735024 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=7.82447662 podStartE2EDuration="19.735002296s" podCreationTimestamp="2025-10-08 07:32:00 +0000 UTC" firstStartedPulling="2025-10-08 07:32:07.256586341 +0000 UTC m=+912.627551276" lastFinishedPulling="2025-10-08 07:32:19.167112017 +0000 UTC m=+924.538076952" observedRunningTime="2025-10-08 07:32:19.719556668 +0000 UTC m=+925.090521653" watchObservedRunningTime="2025-10-08 07:32:19.735002296 +0000 UTC m=+925.105967231"
Oct 08 07:32:19 crc kubenswrapper[4693]: I1008 07:32:19.804486 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Oct 08 07:32:19 crc kubenswrapper[4693]: I1008 07:32:19.873296 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Oct 08 07:32:20 crc kubenswrapper[4693]: I1008 07:32:20.616354 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c4cf2123-362f-4d9f-8080-bd9d6e13de17","Type":"ContainerStarted","Data":"3d56b3b9950220881f2c43a3400907de9d364ee1ba8251bfb7492ba402b773dc"}
Oct 08 07:32:20 crc kubenswrapper[4693]: I1008 07:32:20.620673 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"1aa3187a-5fce-4486-a846-709a6231383f","Type":"ContainerStarted","Data":"4e142cd24b266f6e314f83c754583ea5e92b240d9e0f92cd1148bacd9b07661c"}
Oct 08 07:32:20 crc kubenswrapper[4693]: I1008 07:32:20.621889 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Oct 08 07:32:20 crc kubenswrapper[4693]: I1008 07:32:20.656645 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=21.626866237 podStartE2EDuration="28.656621994s" podCreationTimestamp="2025-10-08 07:31:52 +0000 UTC" firstStartedPulling="2025-10-08 07:32:07.249623748 +0000 UTC m=+912.620588673" lastFinishedPulling="2025-10-08 07:32:14.279379455 +0000 UTC m=+919.650344430" observedRunningTime="2025-10-08 07:32:20.649221329 +0000 UTC m=+926.020186294" watchObservedRunningTime="2025-10-08 07:32:20.656621994 +0000 UTC m=+926.027586969"
Oct 08 07:32:20 crc kubenswrapper[4693]: I1008 07:32:20.681604 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=22.127259309 podStartE2EDuration="29.681582892s" podCreationTimestamp="2025-10-08 07:31:51 +0000 UTC" firstStartedPulling="2025-10-08 07:32:06.994564421 +0000 UTC m=+912.365529356" lastFinishedPulling="2025-10-08 07:32:14.548887994 +0000 UTC m=+919.919852939" observedRunningTime="2025-10-08 07:32:20.67808215 +0000 UTC m=+926.049047125" watchObservedRunningTime="2025-10-08 07:32:20.681582892 +0000 UTC m=+926.052547847"
Oct 08 07:32:20 crc kubenswrapper[4693]: I1008 07:32:20.700637 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Oct 08 07:32:20 crc kubenswrapper[4693]: I1008 07:32:20.961803 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-r8hm9"]
Oct 08 07:32:20 crc kubenswrapper[4693]: I1008 07:32:20.994707 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-cggg7"]
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.000305 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.002887 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.009335 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-cggg7"]
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.031778 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-47xpx"]
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.032734 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.042181 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.073130 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-47xpx"]
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.115693 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/dd6332ac-70b3-4137-9419-3d394f270aa3-ovn-rundir\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.115738 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd6332ac-70b3-4137-9419-3d394f270aa3-combined-ca-bundle\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.115784 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zp2b\" (UniqueName: \"kubernetes.io/projected/61611c91-8e6e-451f-9964-4b05629e8727-kube-api-access-4zp2b\") pod \"dnsmasq-dns-7fd796d7df-cggg7\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.115807 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-config\") pod \"dnsmasq-dns-7fd796d7df-cggg7\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.115845 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-cggg7\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.115859 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-cggg7\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.115874 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/dd6332ac-70b3-4137-9419-3d394f270aa3-ovs-rundir\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.116013 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd6332ac-70b3-4137-9419-3d394f270aa3-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.116123 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtsc9\" (UniqueName: \"kubernetes.io/projected/dd6332ac-70b3-4137-9419-3d394f270aa3-kube-api-access-mtsc9\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.116159 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd6332ac-70b3-4137-9419-3d394f270aa3-config\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.219565 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-cggg7\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.219611 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-cggg7\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.219628 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/dd6332ac-70b3-4137-9419-3d394f270aa3-ovs-rundir\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.219664 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd6332ac-70b3-4137-9419-3d394f270aa3-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.219694 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtsc9\" (UniqueName: \"kubernetes.io/projected/dd6332ac-70b3-4137-9419-3d394f270aa3-kube-api-access-mtsc9\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.219714 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd6332ac-70b3-4137-9419-3d394f270aa3-config\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.219752 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/dd6332ac-70b3-4137-9419-3d394f270aa3-ovn-rundir\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.219774 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd6332ac-70b3-4137-9419-3d394f270aa3-combined-ca-bundle\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.219824 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zp2b\" (UniqueName: \"kubernetes.io/projected/61611c91-8e6e-451f-9964-4b05629e8727-kube-api-access-4zp2b\") pod \"dnsmasq-dns-7fd796d7df-cggg7\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.219845 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-config\") pod \"dnsmasq-dns-7fd796d7df-cggg7\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.220660 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-config\") pod \"dnsmasq-dns-7fd796d7df-cggg7\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.221098 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/dd6332ac-70b3-4137-9419-3d394f270aa3-ovn-rundir\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.221713 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd6332ac-70b3-4137-9419-3d394f270aa3-config\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.222244 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-cggg7\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.222751 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-cggg7\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.222796 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/dd6332ac-70b3-4137-9419-3d394f270aa3-ovs-rundir\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.228486 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd6332ac-70b3-4137-9419-3d394f270aa3-combined-ca-bundle\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.228497 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd6332ac-70b3-4137-9419-3d394f270aa3-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.246085 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zp2b\" (UniqueName: \"kubernetes.io/projected/61611c91-8e6e-451f-9964-4b05629e8727-kube-api-access-4zp2b\") pod \"dnsmasq-dns-7fd796d7df-cggg7\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.260486 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtsc9\" (UniqueName: \"kubernetes.io/projected/dd6332ac-70b3-4137-9419-3d394f270aa3-kube-api-access-mtsc9\") pod \"ovn-controller-metrics-47xpx\" (UID: \"dd6332ac-70b3-4137-9419-3d394f270aa3\") " pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.260783 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.279512 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-pd96z"]
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.338618 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wb8fg"]
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.340047 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.347290 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.348363 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.352501 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wb8fg"]
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.359041 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.361195 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-47xpx"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.424210 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.424279 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.424337 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.424384 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-config\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.424464 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grgmf\" (UniqueName: \"kubernetes.io/projected/10770f18-c298-4a77-a412-4f71ec5edd95-kube-api-access-grgmf\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.525746 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grgmf\" (UniqueName: \"kubernetes.io/projected/10770f18-c298-4a77-a412-4f71ec5edd95-kube-api-access-grgmf\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.525857 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.525908 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.525943 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.525970 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-config\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.526959 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-config\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.527768 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.528363 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.528740 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.546964 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grgmf\" (UniqueName: \"kubernetes.io/projected/10770f18-c298-4a77-a412-4f71ec5edd95-kube-api-access-grgmf\") pod \"dnsmasq-dns-86db49b7ff-wb8fg\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.644196 4693 generic.go:334] "Generic (PLEG): container finished" podID="4b35ec4a-da73-4069-b14e-341b56e5718f" containerID="d1f5a8d49568b6a405cfee200964960a4c9bc29d1d56de13177cde31961c73a0" exitCode=0
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.644319 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" event={"ID":"4b35ec4a-da73-4069-b14e-341b56e5718f","Type":"ContainerDied","Data":"d1f5a8d49568b6a405cfee200964960a4c9bc29d1d56de13177cde31961c73a0"}
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.645501 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.662065 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.671324 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-pd96z"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.701595 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.731219 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdp5f\" (UniqueName: \"kubernetes.io/projected/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-kube-api-access-hdp5f\") pod \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\" (UID: \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\") "
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.731326 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-config\") pod \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\" (UID: \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\") "
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.731442 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-dns-svc\") pod \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\" (UID: \"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204\") "
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.734087 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204" (UID: "e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.736402 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-config" (OuterVolumeSpecName: "config") pod "e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204" (UID: "e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.742024 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-kube-api-access-hdp5f" (OuterVolumeSpecName: "kube-api-access-hdp5f") pod "e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204" (UID: "e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204"). InnerVolumeSpecName "kube-api-access-hdp5f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.833502 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-config\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.833536 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.833579 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdp5f\" (UniqueName: \"kubernetes.io/projected/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204-kube-api-access-hdp5f\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.883044 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-47xpx"]
Oct 08 07:32:21 crc kubenswrapper[4693]: W1008 07:32:21.888387 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd6332ac_70b3_4137_9419_3d394f270aa3.slice/crio-73ba49926f0da88f366f159988b8ca6411165d3bfacb662a95ac364b048ab5ce WatchSource:0}: Error finding container 73ba49926f0da88f366f159988b8ca6411165d3bfacb662a95ac364b048ab5ce: Status 404 returned error can't find the container with id 73ba49926f0da88f366f159988b8ca6411165d3bfacb662a95ac364b048ab5ce
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.987493 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.988907 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.995720 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.995952 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-7skz7"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.996136 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Oct 08 07:32:21 crc kubenswrapper[4693]: I1008 07:32:21.996262 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.005592 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.007261 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.012851 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-cggg7"]
Oct 08 07:32:22 crc kubenswrapper[4693]: W1008 07:32:22.027602 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61611c91_8e6e_451f_9964_4b05629e8727.slice/crio-64bcdea5e72c134769bb5c3ad176a5b0c257124d82ed5c1dbc7965ad9d599d36 WatchSource:0}: Error finding container 64bcdea5e72c134769bb5c3ad176a5b0c257124d82ed5c1dbc7965ad9d599d36: Status 404 returned error can't find the container with id 64bcdea5e72c134769bb5c3ad176a5b0c257124d82ed5c1dbc7965ad9d599d36
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.050496 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b35ec4a-da73-4069-b14e-341b56e5718f-dns-svc\") pod \"4b35ec4a-da73-4069-b14e-341b56e5718f\" (UID: \"4b35ec4a-da73-4069-b14e-341b56e5718f\") "
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.050529 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wf965\" (UniqueName: \"kubernetes.io/projected/4b35ec4a-da73-4069-b14e-341b56e5718f-kube-api-access-wf965\") pod \"4b35ec4a-da73-4069-b14e-341b56e5718f\" (UID: \"4b35ec4a-da73-4069-b14e-341b56e5718f\") "
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.050582 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b35ec4a-da73-4069-b14e-341b56e5718f-config\") pod \"4b35ec4a-da73-4069-b14e-341b56e5718f\" (UID: \"4b35ec4a-da73-4069-b14e-341b56e5718f\") "
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.050844 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e01f071e-63e7-4a6e-b321-5f489621b814-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.050898 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e01f071e-63e7-4a6e-b321-5f489621b814-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.050922 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e01f071e-63e7-4a6e-b321-5f489621b814-scripts\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.050939 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvn5h\" (UniqueName: \"kubernetes.io/projected/e01f071e-63e7-4a6e-b321-5f489621b814-kube-api-access-gvn5h\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.050973 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e01f071e-63e7-4a6e-b321-5f489621b814-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.050993 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01f071e-63e7-4a6e-b321-5f489621b814-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.051023 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e01f071e-63e7-4a6e-b321-5f489621b814-config\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.055731 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b35ec4a-da73-4069-b14e-341b56e5718f-kube-api-access-wf965" (OuterVolumeSpecName: "kube-api-access-wf965") pod "4b35ec4a-da73-4069-b14e-341b56e5718f" (UID: "4b35ec4a-da73-4069-b14e-341b56e5718f"). InnerVolumeSpecName "kube-api-access-wf965". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.068060 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b35ec4a-da73-4069-b14e-341b56e5718f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4b35ec4a-da73-4069-b14e-341b56e5718f" (UID: "4b35ec4a-da73-4069-b14e-341b56e5718f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.086397 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b35ec4a-da73-4069-b14e-341b56e5718f-config" (OuterVolumeSpecName: "config") pod "4b35ec4a-da73-4069-b14e-341b56e5718f" (UID: "4b35ec4a-da73-4069-b14e-341b56e5718f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.152764 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e01f071e-63e7-4a6e-b321-5f489621b814-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.152844 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e01f071e-63e7-4a6e-b321-5f489621b814-scripts\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.152861 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvn5h\" (UniqueName: \"kubernetes.io/projected/e01f071e-63e7-4a6e-b321-5f489621b814-kube-api-access-gvn5h\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.152897 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e01f071e-63e7-4a6e-b321-5f489621b814-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.152918 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01f071e-63e7-4a6e-b321-5f489621b814-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.152944 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e01f071e-63e7-4a6e-b321-5f489621b814-config\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.152987 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e01f071e-63e7-4a6e-b321-5f489621b814-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.153037 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b35ec4a-da73-4069-b14e-341b56e5718f-config\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.153048 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b35ec4a-da73-4069-b14e-341b56e5718f-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.153057 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wf965\" (UniqueName: \"kubernetes.io/projected/4b35ec4a-da73-4069-b14e-341b56e5718f-kube-api-access-wf965\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.153507 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e01f071e-63e7-4a6e-b321-5f489621b814-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.154406 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e01f071e-63e7-4a6e-b321-5f489621b814-config\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.154491 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e01f071e-63e7-4a6e-b321-5f489621b814-scripts\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.158218 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e01f071e-63e7-4a6e-b321-5f489621b814-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.158505 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01f071e-63e7-4a6e-b321-5f489621b814-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.159418 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e01f071e-63e7-4a6e-b321-5f489621b814-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.169134 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wb8fg"]
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.170372 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvn5h\" (UniqueName: \"kubernetes.io/projected/e01f071e-63e7-4a6e-b321-5f489621b814-kube-api-access-gvn5h\") pod \"ovn-northd-0\" (UID: \"e01f071e-63e7-4a6e-b321-5f489621b814\") " pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: W1008 07:32:22.190720 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10770f18_c298_4a77_a412_4f71ec5edd95.slice/crio-be6f3e5c0f5defc67f50a548f2f96bb84f4997db41bcb934904ba73de490c0e3 WatchSource:0}: Error finding container be6f3e5c0f5defc67f50a548f2f96bb84f4997db41bcb934904ba73de490c0e3: Status 404 returned error can't find the container with id be6f3e5c0f5defc67f50a548f2f96bb84f4997db41bcb934904ba73de490c0e3
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.315315 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.653034 4693 generic.go:334] "Generic (PLEG): container finished" podID="10770f18-c298-4a77-a412-4f71ec5edd95" containerID="2604c37490cbb9fd01d51b7686d56b449b71ce6fbea561da82b74807711c3e07" exitCode=0
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.653135 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg" event={"ID":"10770f18-c298-4a77-a412-4f71ec5edd95","Type":"ContainerDied","Data":"2604c37490cbb9fd01d51b7686d56b449b71ce6fbea561da82b74807711c3e07"}
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.653375 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg" event={"ID":"10770f18-c298-4a77-a412-4f71ec5edd95","Type":"ContainerStarted","Data":"be6f3e5c0f5defc67f50a548f2f96bb84f4997db41bcb934904ba73de490c0e3"}
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.655064 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-47xpx" event={"ID":"dd6332ac-70b3-4137-9419-3d394f270aa3","Type":"ContainerStarted","Data":"7a858416f9bc6e15e65cf7df266e88bcd2556e7532cfea6dc1e1826e291694fd"}
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.655104 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-47xpx" event={"ID":"dd6332ac-70b3-4137-9419-3d394f270aa3","Type":"ContainerStarted","Data":"73ba49926f0da88f366f159988b8ca6411165d3bfacb662a95ac364b048ab5ce"}
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.656321 4693 generic.go:334] "Generic (PLEG): container finished" podID="61611c91-8e6e-451f-9964-4b05629e8727" containerID="1d1e1ae9b9d699690b05b0b54bad52b19bd8de1bc403dfbdd09c70c6ab2d9ee9" exitCode=0
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.656352 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-cggg7" event={"ID":"61611c91-8e6e-451f-9964-4b05629e8727","Type":"ContainerDied","Data":"1d1e1ae9b9d699690b05b0b54bad52b19bd8de1bc403dfbdd09c70c6ab2d9ee9"}
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.656393 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-cggg7" event={"ID":"61611c91-8e6e-451f-9964-4b05629e8727","Type":"ContainerStarted","Data":"64bcdea5e72c134769bb5c3ad176a5b0c257124d82ed5c1dbc7965ad9d599d36"}
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.657658 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9" event={"ID":"4b35ec4a-da73-4069-b14e-341b56e5718f","Type":"ContainerDied","Data":"2be601f45da6d1679237ce1dd71d5c8fbd0b776be525e086c193cb0649e52d75"}
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.657668 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-r8hm9"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.657699 4693 scope.go:117] "RemoveContainer" containerID="d1f5a8d49568b6a405cfee200964960a4c9bc29d1d56de13177cde31961c73a0"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.662764 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-pd96z"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.662823 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-pd96z" event={"ID":"e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204","Type":"ContainerDied","Data":"e479e873aa5d2d5d68361a17920f0696aebe2f8e37c9e6e8191ca76bc5b5d9e1"}
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.737698 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-47xpx" podStartSLOduration=2.737678641 podStartE2EDuration="2.737678641s" podCreationTimestamp="2025-10-08 07:32:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:32:22.732385461 +0000 UTC m=+928.103350396" watchObservedRunningTime="2025-10-08 07:32:22.737678641 +0000 UTC m=+928.108643586"
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.786990 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.809824 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-pd96z"]
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.818266 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-pd96z"]
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.851854 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-r8hm9"]
Oct 08 07:32:22 crc kubenswrapper[4693]: I1008 07:32:22.855845 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-r8hm9"]
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.145682 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.145801 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.373906 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b35ec4a-da73-4069-b14e-341b56e5718f" path="/var/lib/kubelet/pods/4b35ec4a-da73-4069-b14e-341b56e5718f/volumes"
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.374418 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204" path="/var/lib/kubelet/pods/e1fe4aa6-0d7d-4cb4-abbf-d90087a8b204/volumes"
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.489608 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.489658 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.489698 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr"
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.490317 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2d5b335782865c10d71fa446e1c1690f0fdc6f76d8d4163cb446a08bd0b03853"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.490366 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://2d5b335782865c10d71fa446e1c1690f0fdc6f76d8d4163cb446a08bd0b03853" gracePeriod=600
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.673214 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-cggg7" event={"ID":"61611c91-8e6e-451f-9964-4b05629e8727","Type":"ContainerStarted","Data":"a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578"}
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.673464 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fd796d7df-cggg7"
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.679663 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="2d5b335782865c10d71fa446e1c1690f0fdc6f76d8d4163cb446a08bd0b03853" exitCode=0
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.679740 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"2d5b335782865c10d71fa446e1c1690f0fdc6f76d8d4163cb446a08bd0b03853"}
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.679907 4693 scope.go:117] "RemoveContainer" containerID="9384b82d3cba6b720e3fd8f3218eba5a267966bc2aa3a68986f9c6356d620303"
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.681463 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e01f071e-63e7-4a6e-b321-5f489621b814","Type":"ContainerStarted","Data":"408115eea2c1b848e6aefce046397ebc69d0166b144595641d511ede18718ede"}
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.686126 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg" event={"ID":"10770f18-c298-4a77-a412-4f71ec5edd95","Type":"ContainerStarted","Data":"9eed01ec4a36ff05bbb762f10362a709c15e11b5e3384ac6a5f0cc9eef5deba3"}
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.686167 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg"
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.703016 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fd796d7df-cggg7" podStartSLOduration=3.702998011 podStartE2EDuration="3.702998011s" podCreationTimestamp="2025-10-08 07:32:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:32:23.694660241 +0000 UTC m=+929.065625186" watchObservedRunningTime="2025-10-08 07:32:23.702998011 +0000 UTC m=+929.073962946"
Oct 08 07:32:23 crc kubenswrapper[4693]: I1008 07:32:23.724905 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg" podStartSLOduration=2.724890328 podStartE2EDuration="2.724890328s" podCreationTimestamp="2025-10-08 07:32:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:32:23.723665256 +0000 UTC m=+929.094630191" watchObservedRunningTime="2025-10-08 07:32:23.724890328 +0000 UTC m=+929.095855263"
Oct 08 07:32:24 crc kubenswrapper[4693]: I1008 07:32:24.188053 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Oct 08 07:32:24 crc kubenswrapper[4693]: I1008 07:32:24.188105 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Oct 08 07:32:24 crc kubenswrapper[4693]: I1008 07:32:24.539169 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Oct 08 07:32:24 crc kubenswrapper[4693]: I1008 07:32:24.697049 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"622f06c0bb6cdaa5465830b91799216c70f2eacc877b5e7e53cedc7bd9a96277"}
Oct 08 07:32:25 crc kubenswrapper[4693]: I1008 07:32:25.252944 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Oct 08 07:32:25 crc kubenswrapper[4693]: I1008 07:32:25.342887 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Oct 08 07:32:25 crc kubenswrapper[4693]: I1008 07:32:25.709260 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e01f071e-63e7-4a6e-b321-5f489621b814","Type":"ContainerStarted","Data":"3a3cf87f1f823bc028212653d5968a247ea6cc2203cef4ea7559ff49a744d121"}
Oct 08 07:32:25 crc kubenswrapper[4693]: I1008 07:32:25.709345 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e01f071e-63e7-4a6e-b321-5f489621b814","Type":"ContainerStarted","Data":"0dc3ce56dbe255f8af9cb3f1c35c77647cb8fcc6f947fc969ef5a7dfd2d47d0a"}
Oct 08 07:32:25 crc kubenswrapper[4693]: I1008 07:32:25.743078 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.043542753 podStartE2EDuration="4.743055798s" podCreationTimestamp="2025-10-08 07:32:21 +0000 UTC" firstStartedPulling="2025-10-08 07:32:22.806559507 +0000 UTC m=+928.177524442" lastFinishedPulling="2025-10-08 07:32:24.506072552 +0000 UTC m=+929.877037487" observedRunningTime="2025-10-08 07:32:25.732062808 +0000 UTC m=+931.103027743" watchObservedRunningTime="2025-10-08 07:32:25.743055798 +0000 UTC m=+931.114020723"
Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.212621 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-cggg7"]
Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.247802 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-c6w9b"]
Oct 08 07:32:26 crc kubenswrapper[4693]: E1008 07:32:26.248124 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b35ec4a-da73-4069-b14e-341b56e5718f" containerName="init"
Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.248139 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b35ec4a-da73-4069-b14e-341b56e5718f"
containerName="init" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.248317 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b35ec4a-da73-4069-b14e-341b56e5718f" containerName="init" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.249091 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.267753 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.324658 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-c6w9b"] Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.329038 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-dns-svc\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.329088 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-config\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.329114 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.329230 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qr9sh\" (UniqueName: \"kubernetes.io/projected/e1409895-3bad-488c-a31c-2c4ed9b75d1c-kube-api-access-qr9sh\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.329283 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.430743 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qr9sh\" (UniqueName: \"kubernetes.io/projected/e1409895-3bad-488c-a31c-2c4ed9b75d1c-kube-api-access-qr9sh\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.430833 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 
08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.430861 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-dns-svc\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.430884 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-config\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.430901 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.431965 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.432044 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-dns-svc\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.432779 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-config\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.432931 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.448928 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qr9sh\" (UniqueName: \"kubernetes.io/projected/e1409895-3bad-488c-a31c-2c4ed9b75d1c-kube-api-access-qr9sh\") pod \"dnsmasq-dns-698758b865-c6w9b\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.563579 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.720557 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.720705 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fd796d7df-cggg7" podUID="61611c91-8e6e-451f-9964-4b05629e8727" containerName="dnsmasq-dns" containerID="cri-o://a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578" gracePeriod=10 Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.854158 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 08 07:32:26 crc kubenswrapper[4693]: I1008 07:32:26.911308 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.058909 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-c6w9b"] Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.134083 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-cggg7" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.241392 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-config\") pod \"61611c91-8e6e-451f-9964-4b05629e8727\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.241441 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zp2b\" (UniqueName: \"kubernetes.io/projected/61611c91-8e6e-451f-9964-4b05629e8727-kube-api-access-4zp2b\") pod \"61611c91-8e6e-451f-9964-4b05629e8727\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.241471 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-ovsdbserver-nb\") pod \"61611c91-8e6e-451f-9964-4b05629e8727\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.241524 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-dns-svc\") pod \"61611c91-8e6e-451f-9964-4b05629e8727\" (UID: \"61611c91-8e6e-451f-9964-4b05629e8727\") " Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.252600 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61611c91-8e6e-451f-9964-4b05629e8727-kube-api-access-4zp2b" (OuterVolumeSpecName: "kube-api-access-4zp2b") pod "61611c91-8e6e-451f-9964-4b05629e8727" (UID: "61611c91-8e6e-451f-9964-4b05629e8727"). InnerVolumeSpecName "kube-api-access-4zp2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.282578 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-config" (OuterVolumeSpecName: "config") pod "61611c91-8e6e-451f-9964-4b05629e8727" (UID: "61611c91-8e6e-451f-9964-4b05629e8727"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.294655 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "61611c91-8e6e-451f-9964-4b05629e8727" (UID: "61611c91-8e6e-451f-9964-4b05629e8727"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.300840 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "61611c91-8e6e-451f-9964-4b05629e8727" (UID: "61611c91-8e6e-451f-9964-4b05629e8727"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.347792 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.347876 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zp2b\" (UniqueName: \"kubernetes.io/projected/61611c91-8e6e-451f-9964-4b05629e8727-kube-api-access-4zp2b\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.347894 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.347933 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61611c91-8e6e-451f-9964-4b05629e8727-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.351619 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 08 07:32:27 crc kubenswrapper[4693]: E1008 07:32:27.352026 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61611c91-8e6e-451f-9964-4b05629e8727" containerName="init" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.352041 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="61611c91-8e6e-451f-9964-4b05629e8727" containerName="init" Oct 08 07:32:27 crc kubenswrapper[4693]: E1008 07:32:27.352085 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61611c91-8e6e-451f-9964-4b05629e8727" containerName="dnsmasq-dns" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.352094 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="61611c91-8e6e-451f-9964-4b05629e8727" containerName="dnsmasq-dns" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.352312 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="61611c91-8e6e-451f-9964-4b05629e8727" containerName="dnsmasq-dns" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.357838 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.359056 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-wwghj" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.359922 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.360683 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.361168 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.388883 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.449709 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smz8l\" (UniqueName: \"kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-kube-api-access-smz8l\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.449791 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/298a15e0-992f-4f83-8067-7e8e6aa47b89-lock\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.449838 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/298a15e0-992f-4f83-8067-7e8e6aa47b89-cache\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.449863 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.449906 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.551939 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/298a15e0-992f-4f83-8067-7e8e6aa47b89-cache\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.551993 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.552051 4693 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.552264 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smz8l\" (UniqueName: \"kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-kube-api-access-smz8l\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.552305 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/298a15e0-992f-4f83-8067-7e8e6aa47b89-lock\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: E1008 07:32:27.552713 4693 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 08 07:32:27 crc kubenswrapper[4693]: E1008 07:32:27.552781 4693 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 08 07:32:27 crc kubenswrapper[4693]: E1008 07:32:27.552897 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift podName:298a15e0-992f-4f83-8067-7e8e6aa47b89 nodeName:}" failed. No retries permitted until 2025-10-08 07:32:28.05286281 +0000 UTC m=+933.423827785 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift") pod "swift-storage-0" (UID: "298a15e0-992f-4f83-8067-7e8e6aa47b89") : configmap "swift-ring-files" not found Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.552961 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/298a15e0-992f-4f83-8067-7e8e6aa47b89-lock\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.552900 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.552715 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/298a15e0-992f-4f83-8067-7e8e6aa47b89-cache\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.577686 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smz8l\" (UniqueName: \"kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-kube-api-access-smz8l\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.588469 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.732113 4693 generic.go:334] "Generic (PLEG): container finished" podID="61611c91-8e6e-451f-9964-4b05629e8727" containerID="a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578" exitCode=0 Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.732171 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-cggg7" event={"ID":"61611c91-8e6e-451f-9964-4b05629e8727","Type":"ContainerDied","Data":"a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578"} Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.732230 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-cggg7" event={"ID":"61611c91-8e6e-451f-9964-4b05629e8727","Type":"ContainerDied","Data":"64bcdea5e72c134769bb5c3ad176a5b0c257124d82ed5c1dbc7965ad9d599d36"} Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.732252 4693 scope.go:117] "RemoveContainer" containerID="a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.732579 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-cggg7" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.738194 4693 generic.go:334] "Generic (PLEG): container finished" podID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" containerID="03c3df716366d99c8ccfa215ed3d84375ccf8bbcf5951bbfc75b536283b119b5" exitCode=0 Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.738293 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-c6w9b" event={"ID":"e1409895-3bad-488c-a31c-2c4ed9b75d1c","Type":"ContainerDied","Data":"03c3df716366d99c8ccfa215ed3d84375ccf8bbcf5951bbfc75b536283b119b5"} Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.738356 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-c6w9b" event={"ID":"e1409895-3bad-488c-a31c-2c4ed9b75d1c","Type":"ContainerStarted","Data":"26507a9e77794d1a4247652f7376a15f18d536e2037432b292ce555c208b2a86"} Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.762508 4693 scope.go:117] "RemoveContainer" containerID="1d1e1ae9b9d699690b05b0b54bad52b19bd8de1bc403dfbdd09c70c6ab2d9ee9" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.881081 4693 scope.go:117] "RemoveContainer" containerID="a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578" Oct 08 07:32:27 crc kubenswrapper[4693]: E1008 07:32:27.884043 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578\": container with ID starting with a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578 not found: ID does not exist" containerID="a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.884196 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578"} err="failed to get container status \"a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578\": rpc error: code = NotFound desc = could not find container 
\"a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578\": container with ID starting with a328874665b3d940bb37c406b933f3bd1020ceb1d15243b97cc08df4a05a6578 not found: ID does not exist" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.884340 4693 scope.go:117] "RemoveContainer" containerID="1d1e1ae9b9d699690b05b0b54bad52b19bd8de1bc403dfbdd09c70c6ab2d9ee9" Oct 08 07:32:27 crc kubenswrapper[4693]: E1008 07:32:27.886029 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d1e1ae9b9d699690b05b0b54bad52b19bd8de1bc403dfbdd09c70c6ab2d9ee9\": container with ID starting with 1d1e1ae9b9d699690b05b0b54bad52b19bd8de1bc403dfbdd09c70c6ab2d9ee9 not found: ID does not exist" containerID="1d1e1ae9b9d699690b05b0b54bad52b19bd8de1bc403dfbdd09c70c6ab2d9ee9" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.886110 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d1e1ae9b9d699690b05b0b54bad52b19bd8de1bc403dfbdd09c70c6ab2d9ee9"} err="failed to get container status \"1d1e1ae9b9d699690b05b0b54bad52b19bd8de1bc403dfbdd09c70c6ab2d9ee9\": rpc error: code = NotFound desc = could not find container \"1d1e1ae9b9d699690b05b0b54bad52b19bd8de1bc403dfbdd09c70c6ab2d9ee9\": container with ID starting with 1d1e1ae9b9d699690b05b0b54bad52b19bd8de1bc403dfbdd09c70c6ab2d9ee9 not found: ID does not exist" Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.896182 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-cggg7"] Oct 08 07:32:27 crc kubenswrapper[4693]: I1008 07:32:27.908665 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-cggg7"] Oct 08 07:32:28 crc kubenswrapper[4693]: I1008 07:32:28.060647 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:28 crc kubenswrapper[4693]: E1008 07:32:28.060867 4693 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 08 07:32:28 crc kubenswrapper[4693]: E1008 07:32:28.060886 4693 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 08 07:32:28 crc kubenswrapper[4693]: E1008 07:32:28.060952 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift podName:298a15e0-992f-4f83-8067-7e8e6aa47b89 nodeName:}" failed. No retries permitted until 2025-10-08 07:32:29.060930891 +0000 UTC m=+934.431895826 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift") pod "swift-storage-0" (UID: "298a15e0-992f-4f83-8067-7e8e6aa47b89") : configmap "swift-ring-files" not found Oct 08 07:32:28 crc kubenswrapper[4693]: I1008 07:32:28.749721 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-c6w9b" event={"ID":"e1409895-3bad-488c-a31c-2c4ed9b75d1c","Type":"ContainerStarted","Data":"c4c49c3604b42c51ec78370f3bec626eef69ed2e477f8751170d9f819bcb6474"} Oct 08 07:32:28 crc kubenswrapper[4693]: I1008 07:32:28.750122 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:28 crc kubenswrapper[4693]: I1008 07:32:28.785698 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-c6w9b" podStartSLOduration=2.785680096 podStartE2EDuration="2.785680096s" podCreationTimestamp="2025-10-08 07:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:32:28.779151334 +0000 UTC m=+934.150116269" watchObservedRunningTime="2025-10-08 07:32:28.785680096 +0000 UTC m=+934.156645031" Oct 08 07:32:29 crc kubenswrapper[4693]: I1008 07:32:29.074101 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:29 crc kubenswrapper[4693]: E1008 07:32:29.074234 4693 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 08 07:32:29 crc kubenswrapper[4693]: E1008 07:32:29.074250 4693 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 08 07:32:29 crc kubenswrapper[4693]: E1008 07:32:29.074300 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift podName:298a15e0-992f-4f83-8067-7e8e6aa47b89 nodeName:}" failed. No retries permitted until 2025-10-08 07:32:31.074286088 +0000 UTC m=+936.445251023 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift") pod "swift-storage-0" (UID: "298a15e0-992f-4f83-8067-7e8e6aa47b89") : configmap "swift-ring-files" not found Oct 08 07:32:29 crc kubenswrapper[4693]: I1008 07:32:29.372870 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61611c91-8e6e-451f-9964-4b05629e8727" path="/var/lib/kubelet/pods/61611c91-8e6e-451f-9964-4b05629e8727/volumes" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.117292 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:31 crc kubenswrapper[4693]: E1008 07:32:31.117611 4693 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 08 07:32:31 crc kubenswrapper[4693]: E1008 07:32:31.117852 4693 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 08 07:32:31 crc kubenswrapper[4693]: E1008 07:32:31.117949 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift podName:298a15e0-992f-4f83-8067-7e8e6aa47b89 nodeName:}" failed. No retries permitted until 2025-10-08 07:32:35.117922958 +0000 UTC m=+940.488887933 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift") pod "swift-storage-0" (UID: "298a15e0-992f-4f83-8067-7e8e6aa47b89") : configmap "swift-ring-files" not found Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.329300 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-m9dkh"] Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.330963 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.334604 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.342574 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.342913 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.346088 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-m9dkh"] Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.523851 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-dispersionconf\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.524337 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn7kk\" (UniqueName: \"kubernetes.io/projected/7db9c043-f734-4339-8691-8276fc1a459b-kube-api-access-mn7kk\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.524368 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/7db9c043-f734-4339-8691-8276fc1a459b-ring-data-devices\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.524466 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-combined-ca-bundle\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.524579 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/7db9c043-f734-4339-8691-8276fc1a459b-etc-swift\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.524626 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-swiftconf\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.524692 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7db9c043-f734-4339-8691-8276fc1a459b-scripts\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 
07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.626760 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-combined-ca-bundle\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.626878 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/7db9c043-f734-4339-8691-8276fc1a459b-etc-swift\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.626954 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-swiftconf\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.627059 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7db9c043-f734-4339-8691-8276fc1a459b-scripts\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.627195 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-dispersionconf\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.627252 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn7kk\" (UniqueName: \"kubernetes.io/projected/7db9c043-f734-4339-8691-8276fc1a459b-kube-api-access-mn7kk\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.627284 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/7db9c043-f734-4339-8691-8276fc1a459b-ring-data-devices\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.628510 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/7db9c043-f734-4339-8691-8276fc1a459b-ring-data-devices\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.628707 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/7db9c043-f734-4339-8691-8276fc1a459b-etc-swift\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.628943 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7db9c043-f734-4339-8691-8276fc1a459b-scripts\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.636691 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-combined-ca-bundle\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.638353 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-swiftconf\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.640878 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-dispersionconf\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.663321 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn7kk\" (UniqueName: \"kubernetes.io/projected/7db9c043-f734-4339-8691-8276fc1a459b-kube-api-access-mn7kk\") pod \"swift-ring-rebalance-m9dkh\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.664413 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg" Oct 08 07:32:31 crc kubenswrapper[4693]: I1008 07:32:31.957890 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:32 crc kubenswrapper[4693]: I1008 07:32:32.239652 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-m9dkh"] Oct 08 07:32:32 crc kubenswrapper[4693]: I1008 07:32:32.787475 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-m9dkh" event={"ID":"7db9c043-f734-4339-8691-8276fc1a459b","Type":"ContainerStarted","Data":"e16fb4bbbc829711301e616129a4da44c47412b27feab048d65b028dfcf2ab74"} Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.237111 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-5p746"] Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.238956 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-5p746" Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.251185 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-5p746"] Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.385683 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w22g\" (UniqueName: \"kubernetes.io/projected/89ae7404-8482-48a0-8747-1b4feb15d69d-kube-api-access-9w22g\") pod \"keystone-db-create-5p746\" (UID: \"89ae7404-8482-48a0-8747-1b4feb15d69d\") " pod="openstack/keystone-db-create-5p746" Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.442336 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-nlzwf"] Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.446549 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nlzwf" Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.450556 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-nlzwf"] Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.488059 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w22g\" (UniqueName: \"kubernetes.io/projected/89ae7404-8482-48a0-8747-1b4feb15d69d-kube-api-access-9w22g\") pod \"keystone-db-create-5p746\" (UID: \"89ae7404-8482-48a0-8747-1b4feb15d69d\") " pod="openstack/keystone-db-create-5p746" Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.514751 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w22g\" (UniqueName: \"kubernetes.io/projected/89ae7404-8482-48a0-8747-1b4feb15d69d-kube-api-access-9w22g\") pod \"keystone-db-create-5p746\" (UID: \"89ae7404-8482-48a0-8747-1b4feb15d69d\") " pod="openstack/keystone-db-create-5p746" Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.562762 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5p746" Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.590521 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnszd\" (UniqueName: \"kubernetes.io/projected/7b169bf2-af33-4a98-bccc-43de585c357c-kube-api-access-gnszd\") pod \"placement-db-create-nlzwf\" (UID: \"7b169bf2-af33-4a98-bccc-43de585c357c\") " pod="openstack/placement-db-create-nlzwf" Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.692245 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnszd\" (UniqueName: \"kubernetes.io/projected/7b169bf2-af33-4a98-bccc-43de585c357c-kube-api-access-gnszd\") pod \"placement-db-create-nlzwf\" (UID: \"7b169bf2-af33-4a98-bccc-43de585c357c\") " pod="openstack/placement-db-create-nlzwf" Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.695048 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-zx76x"] Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.696019 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-zx76x" Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.703930 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-zx76x"] Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.722728 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnszd\" (UniqueName: \"kubernetes.io/projected/7b169bf2-af33-4a98-bccc-43de585c357c-kube-api-access-gnszd\") pod \"placement-db-create-nlzwf\" (UID: \"7b169bf2-af33-4a98-bccc-43de585c357c\") " pod="openstack/placement-db-create-nlzwf" Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.770710 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nlzwf" Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.895830 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2r4m\" (UniqueName: \"kubernetes.io/projected/f9a1dd78-9d6b-4d47-99c1-4823a2c653c2-kube-api-access-m2r4m\") pod \"glance-db-create-zx76x\" (UID: \"f9a1dd78-9d6b-4d47-99c1-4823a2c653c2\") " pod="openstack/glance-db-create-zx76x" Oct 08 07:32:34 crc kubenswrapper[4693]: I1008 07:32:34.997942 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2r4m\" (UniqueName: \"kubernetes.io/projected/f9a1dd78-9d6b-4d47-99c1-4823a2c653c2-kube-api-access-m2r4m\") pod \"glance-db-create-zx76x\" (UID: \"f9a1dd78-9d6b-4d47-99c1-4823a2c653c2\") " pod="openstack/glance-db-create-zx76x" Oct 08 07:32:35 crc kubenswrapper[4693]: I1008 07:32:35.017040 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-5p746"] Oct 08 07:32:35 crc kubenswrapper[4693]: I1008 07:32:35.025876 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2r4m\" (UniqueName: \"kubernetes.io/projected/f9a1dd78-9d6b-4d47-99c1-4823a2c653c2-kube-api-access-m2r4m\") pod \"glance-db-create-zx76x\" (UID: \"f9a1dd78-9d6b-4d47-99c1-4823a2c653c2\") " pod="openstack/glance-db-create-zx76x" Oct 08 07:32:35 crc kubenswrapper[4693]: I1008 07:32:35.200946 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:35 crc kubenswrapper[4693]: E1008 07:32:35.201168 4693 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 08 07:32:35 crc kubenswrapper[4693]: E1008 07:32:35.201185 4693 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 08 07:32:35 crc kubenswrapper[4693]: E1008 07:32:35.201234 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift podName:298a15e0-992f-4f83-8067-7e8e6aa47b89 nodeName:}" failed. No retries permitted until 2025-10-08 07:32:43.201219134 +0000 UTC m=+948.572184069 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift") pod "swift-storage-0" (UID: "298a15e0-992f-4f83-8067-7e8e6aa47b89") : configmap "swift-ring-files" not found Oct 08 07:32:35 crc kubenswrapper[4693]: I1008 07:32:35.323259 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zx76x" Oct 08 07:32:36 crc kubenswrapper[4693]: W1008 07:32:36.536185 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89ae7404_8482_48a0_8747_1b4feb15d69d.slice/crio-a1fa63ef2e90e40d9dcdad3ecf26a33b92d79e6b57e0815021718036e040de0c WatchSource:0}: Error finding container a1fa63ef2e90e40d9dcdad3ecf26a33b92d79e6b57e0815021718036e040de0c: Status 404 returned error can't find the container with id a1fa63ef2e90e40d9dcdad3ecf26a33b92d79e6b57e0815021718036e040de0c Oct 08 07:32:36 crc kubenswrapper[4693]: I1008 07:32:36.566122 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:32:36 crc kubenswrapper[4693]: I1008 07:32:36.660976 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wb8fg"] Oct 08 07:32:36 crc kubenswrapper[4693]: I1008 07:32:36.662357 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg" podUID="10770f18-c298-4a77-a412-4f71ec5edd95" containerName="dnsmasq-dns" containerID="cri-o://9eed01ec4a36ff05bbb762f10362a709c15e11b5e3384ac6a5f0cc9eef5deba3" gracePeriod=10 Oct 08 07:32:36 crc kubenswrapper[4693]: I1008 07:32:36.828839 4693 generic.go:334] "Generic (PLEG): container finished" podID="10770f18-c298-4a77-a412-4f71ec5edd95" containerID="9eed01ec4a36ff05bbb762f10362a709c15e11b5e3384ac6a5f0cc9eef5deba3" exitCode=0 Oct 08 07:32:36 crc kubenswrapper[4693]: I1008 07:32:36.829086 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg" event={"ID":"10770f18-c298-4a77-a412-4f71ec5edd95","Type":"ContainerDied","Data":"9eed01ec4a36ff05bbb762f10362a709c15e11b5e3384ac6a5f0cc9eef5deba3"} Oct 08 07:32:36 crc kubenswrapper[4693]: I1008 07:32:36.831935 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5p746" event={"ID":"89ae7404-8482-48a0-8747-1b4feb15d69d","Type":"ContainerStarted","Data":"a1fa63ef2e90e40d9dcdad3ecf26a33b92d79e6b57e0815021718036e040de0c"} Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.103420 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.145102 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-nlzwf"] Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.235582 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-config\") pod \"10770f18-c298-4a77-a412-4f71ec5edd95\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.235654 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-ovsdbserver-sb\") pod \"10770f18-c298-4a77-a412-4f71ec5edd95\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.235686 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-dns-svc\") pod \"10770f18-c298-4a77-a412-4f71ec5edd95\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.235736 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-ovsdbserver-nb\") pod \"10770f18-c298-4a77-a412-4f71ec5edd95\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.235854 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grgmf\" (UniqueName: \"kubernetes.io/projected/10770f18-c298-4a77-a412-4f71ec5edd95-kube-api-access-grgmf\") pod \"10770f18-c298-4a77-a412-4f71ec5edd95\" (UID: \"10770f18-c298-4a77-a412-4f71ec5edd95\") " Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.249303 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10770f18-c298-4a77-a412-4f71ec5edd95-kube-api-access-grgmf" (OuterVolumeSpecName: "kube-api-access-grgmf") pod "10770f18-c298-4a77-a412-4f71ec5edd95" (UID: "10770f18-c298-4a77-a412-4f71ec5edd95"). InnerVolumeSpecName "kube-api-access-grgmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.296686 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "10770f18-c298-4a77-a412-4f71ec5edd95" (UID: "10770f18-c298-4a77-a412-4f71ec5edd95"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.302181 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "10770f18-c298-4a77-a412-4f71ec5edd95" (UID: "10770f18-c298-4a77-a412-4f71ec5edd95"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.312342 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-zx76x"] Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.337513 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.337554 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.337564 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grgmf\" (UniqueName: \"kubernetes.io/projected/10770f18-c298-4a77-a412-4f71ec5edd95-kube-api-access-grgmf\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.339292 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "10770f18-c298-4a77-a412-4f71ec5edd95" (UID: "10770f18-c298-4a77-a412-4f71ec5edd95"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.383391 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-config" (OuterVolumeSpecName: "config") pod "10770f18-c298-4a77-a412-4f71ec5edd95" (UID: "10770f18-c298-4a77-a412-4f71ec5edd95"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.438782 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.438824 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10770f18-c298-4a77-a412-4f71ec5edd95-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.456963 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.840294 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg" event={"ID":"10770f18-c298-4a77-a412-4f71ec5edd95","Type":"ContainerDied","Data":"be6f3e5c0f5defc67f50a548f2f96bb84f4997db41bcb934904ba73de490c0e3"} Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.840346 4693 scope.go:117] "RemoveContainer" containerID="9eed01ec4a36ff05bbb762f10362a709c15e11b5e3384ac6a5f0cc9eef5deba3" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.840461 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-wb8fg" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.853663 4693 generic.go:334] "Generic (PLEG): container finished" podID="7b169bf2-af33-4a98-bccc-43de585c357c" containerID="474fa088465bb1d6f5ca25ec9134f34e5ef950d607d402f4a87e5e3448780638" exitCode=0 Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.853918 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nlzwf" event={"ID":"7b169bf2-af33-4a98-bccc-43de585c357c","Type":"ContainerDied","Data":"474fa088465bb1d6f5ca25ec9134f34e5ef950d607d402f4a87e5e3448780638"} Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.854133 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nlzwf" event={"ID":"7b169bf2-af33-4a98-bccc-43de585c357c","Type":"ContainerStarted","Data":"37067fe0e23ce9e749310fa20a6203785199817f0849e535208cdbfb74db89df"} Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.862831 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-m9dkh" event={"ID":"7db9c043-f734-4339-8691-8276fc1a459b","Type":"ContainerStarted","Data":"f75e97abe728a6b5fad75371fccc5f6a9765fd82c035056414ebc6e3eb534286"} Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.873656 4693 generic.go:334] "Generic (PLEG): container finished" podID="89ae7404-8482-48a0-8747-1b4feb15d69d" containerID="84c451866e755a53a71e598d616e8c463a11edfeb3a9c23389f1061ff5797574" exitCode=0 Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.873765 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5p746" event={"ID":"89ae7404-8482-48a0-8747-1b4feb15d69d","Type":"ContainerDied","Data":"84c451866e755a53a71e598d616e8c463a11edfeb3a9c23389f1061ff5797574"} Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.874947 4693 scope.go:117] "RemoveContainer" containerID="2604c37490cbb9fd01d51b7686d56b449b71ce6fbea561da82b74807711c3e07" Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.876352 4693 generic.go:334] "Generic (PLEG): container finished" podID="f9a1dd78-9d6b-4d47-99c1-4823a2c653c2" containerID="9109a4c49db16e29b7012ed1bc6a522e706bb0b442f27e99d3e18ace6540341f" exitCode=0 Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.876396 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zx76x" event={"ID":"f9a1dd78-9d6b-4d47-99c1-4823a2c653c2","Type":"ContainerDied","Data":"9109a4c49db16e29b7012ed1bc6a522e706bb0b442f27e99d3e18ace6540341f"} Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.876419 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zx76x" event={"ID":"f9a1dd78-9d6b-4d47-99c1-4823a2c653c2","Type":"ContainerStarted","Data":"60b15dc3b1df0d141b7786e20685b9cc41d863030d83e4cd595a440b07247e9c"} Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.916834 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wb8fg"] Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.927247 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wb8fg"] Oct 08 07:32:37 crc kubenswrapper[4693]: I1008 07:32:37.938163 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-m9dkh" podStartSLOduration=2.496331477 podStartE2EDuration="6.938139539s" podCreationTimestamp="2025-10-08 07:32:31 +0000 UTC" firstStartedPulling="2025-10-08 07:32:32.245165489 
+0000 UTC m=+937.616130434" lastFinishedPulling="2025-10-08 07:32:36.686973551 +0000 UTC m=+942.057938496" observedRunningTime="2025-10-08 07:32:37.917934396 +0000 UTC m=+943.288899341" watchObservedRunningTime="2025-10-08 07:32:37.938139539 +0000 UTC m=+943.309104484" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.332384 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5p746" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.381418 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10770f18-c298-4a77-a412-4f71ec5edd95" path="/var/lib/kubelet/pods/10770f18-c298-4a77-a412-4f71ec5edd95/volumes" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.416730 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nlzwf" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.421046 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zx76x" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.491561 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9w22g\" (UniqueName: \"kubernetes.io/projected/89ae7404-8482-48a0-8747-1b4feb15d69d-kube-api-access-9w22g\") pod \"89ae7404-8482-48a0-8747-1b4feb15d69d\" (UID: \"89ae7404-8482-48a0-8747-1b4feb15d69d\") " Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.498935 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89ae7404-8482-48a0-8747-1b4feb15d69d-kube-api-access-9w22g" (OuterVolumeSpecName: "kube-api-access-9w22g") pod "89ae7404-8482-48a0-8747-1b4feb15d69d" (UID: "89ae7404-8482-48a0-8747-1b4feb15d69d"). InnerVolumeSpecName "kube-api-access-9w22g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.593246 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnszd\" (UniqueName: \"kubernetes.io/projected/7b169bf2-af33-4a98-bccc-43de585c357c-kube-api-access-gnszd\") pod \"7b169bf2-af33-4a98-bccc-43de585c357c\" (UID: \"7b169bf2-af33-4a98-bccc-43de585c357c\") " Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.593626 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2r4m\" (UniqueName: \"kubernetes.io/projected/f9a1dd78-9d6b-4d47-99c1-4823a2c653c2-kube-api-access-m2r4m\") pod \"f9a1dd78-9d6b-4d47-99c1-4823a2c653c2\" (UID: \"f9a1dd78-9d6b-4d47-99c1-4823a2c653c2\") " Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.594400 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9w22g\" (UniqueName: \"kubernetes.io/projected/89ae7404-8482-48a0-8747-1b4feb15d69d-kube-api-access-9w22g\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.597890 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b169bf2-af33-4a98-bccc-43de585c357c-kube-api-access-gnszd" (OuterVolumeSpecName: "kube-api-access-gnszd") pod "7b169bf2-af33-4a98-bccc-43de585c357c" (UID: "7b169bf2-af33-4a98-bccc-43de585c357c"). InnerVolumeSpecName "kube-api-access-gnszd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.598353 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9a1dd78-9d6b-4d47-99c1-4823a2c653c2-kube-api-access-m2r4m" (OuterVolumeSpecName: "kube-api-access-m2r4m") pod "f9a1dd78-9d6b-4d47-99c1-4823a2c653c2" (UID: "f9a1dd78-9d6b-4d47-99c1-4823a2c653c2"). InnerVolumeSpecName "kube-api-access-m2r4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.695965 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2r4m\" (UniqueName: \"kubernetes.io/projected/f9a1dd78-9d6b-4d47-99c1-4823a2c653c2-kube-api-access-m2r4m\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.696005 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnszd\" (UniqueName: \"kubernetes.io/projected/7b169bf2-af33-4a98-bccc-43de585c357c-kube-api-access-gnszd\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.903201 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zx76x" event={"ID":"f9a1dd78-9d6b-4d47-99c1-4823a2c653c2","Type":"ContainerDied","Data":"60b15dc3b1df0d141b7786e20685b9cc41d863030d83e4cd595a440b07247e9c"} Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.903245 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60b15dc3b1df0d141b7786e20685b9cc41d863030d83e4cd595a440b07247e9c" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.903268 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zx76x" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.904801 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nlzwf" event={"ID":"7b169bf2-af33-4a98-bccc-43de585c357c","Type":"ContainerDied","Data":"37067fe0e23ce9e749310fa20a6203785199817f0849e535208cdbfb74db89df"} Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.904855 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nlzwf" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.904866 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37067fe0e23ce9e749310fa20a6203785199817f0849e535208cdbfb74db89df" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.906239 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5p746" event={"ID":"89ae7404-8482-48a0-8747-1b4feb15d69d","Type":"ContainerDied","Data":"a1fa63ef2e90e40d9dcdad3ecf26a33b92d79e6b57e0815021718036e040de0c"} Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.906263 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1fa63ef2e90e40d9dcdad3ecf26a33b92d79e6b57e0815021718036e040de0c" Oct 08 07:32:39 crc kubenswrapper[4693]: I1008 07:32:39.906305 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-5p746" Oct 08 07:32:40 crc kubenswrapper[4693]: I1008 07:32:40.916352 4693 generic.go:334] "Generic (PLEG): container finished" podID="344b4125-6848-4985-b722-8e9e589b1ab4" containerID="59b5f5fb3f59acdbcb78be6d525d4a9569344861cb9fbcb63563f2eda385b2f5" exitCode=0 Oct 08 07:32:40 crc kubenswrapper[4693]: I1008 07:32:40.916454 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"344b4125-6848-4985-b722-8e9e589b1ab4","Type":"ContainerDied","Data":"59b5f5fb3f59acdbcb78be6d525d4a9569344861cb9fbcb63563f2eda385b2f5"} Oct 08 07:32:40 crc kubenswrapper[4693]: I1008 07:32:40.919059 4693 generic.go:334] "Generic (PLEG): container finished" podID="cc3c1ad2-7355-4db4-af71-27c3454a025c" containerID="1bba1bea243da621ea1b6179ad5db08abecc8d2aa48bc3f641980d1431dcd318" exitCode=0 Oct 08 07:32:40 crc kubenswrapper[4693]: I1008 07:32:40.919101 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cc3c1ad2-7355-4db4-af71-27c3454a025c","Type":"ContainerDied","Data":"1bba1bea243da621ea1b6179ad5db08abecc8d2aa48bc3f641980d1431dcd318"} Oct 08 07:32:41 crc kubenswrapper[4693]: I1008 07:32:41.932911 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"344b4125-6848-4985-b722-8e9e589b1ab4","Type":"ContainerStarted","Data":"0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53"} Oct 08 07:32:41 crc kubenswrapper[4693]: I1008 07:32:41.933600 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 08 07:32:41 crc kubenswrapper[4693]: I1008 07:32:41.934957 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cc3c1ad2-7355-4db4-af71-27c3454a025c","Type":"ContainerStarted","Data":"5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545"} Oct 08 07:32:41 crc kubenswrapper[4693]: I1008 07:32:41.935201 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:32:41 crc kubenswrapper[4693]: I1008 07:32:41.958375 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.527479702 podStartE2EDuration="51.958360132s" podCreationTimestamp="2025-10-08 07:31:50 +0000 UTC" firstStartedPulling="2025-10-08 07:31:51.812705133 +0000 UTC m=+897.183670068" lastFinishedPulling="2025-10-08 07:32:06.243585553 +0000 UTC m=+911.614550498" observedRunningTime="2025-10-08 07:32:41.955415894 +0000 UTC m=+947.326380849" watchObservedRunningTime="2025-10-08 07:32:41.958360132 +0000 UTC m=+947.329325067" Oct 08 07:32:41 crc kubenswrapper[4693]: I1008 07:32:41.982742 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.570610052 podStartE2EDuration="52.982726264s" podCreationTimestamp="2025-10-08 07:31:49 +0000 UTC" firstStartedPulling="2025-10-08 07:31:51.711305302 +0000 UTC m=+897.082270237" lastFinishedPulling="2025-10-08 07:32:06.123421484 +0000 UTC m=+911.494386449" observedRunningTime="2025-10-08 07:32:41.977687481 +0000 UTC m=+947.348652446" watchObservedRunningTime="2025-10-08 07:32:41.982726264 +0000 UTC m=+947.353691199" Oct 08 07:32:43 crc kubenswrapper[4693]: I1008 07:32:43.262602 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0" Oct 08 07:32:43 crc kubenswrapper[4693]: E1008 07:32:43.262863 4693 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 08 07:32:43 crc kubenswrapper[4693]: E1008 07:32:43.262886 4693 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 08 07:32:43 crc kubenswrapper[4693]: E1008 07:32:43.262950 4693 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift podName:298a15e0-992f-4f83-8067-7e8e6aa47b89 nodeName:}" failed. No retries permitted until 2025-10-08 07:32:59.26292977 +0000 UTC m=+964.633894735 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift") pod "swift-storage-0" (UID: "298a15e0-992f-4f83-8067-7e8e6aa47b89") : configmap "swift-ring-files" not found Oct 08 07:32:43 crc kubenswrapper[4693]: I1008 07:32:43.949376 4693 generic.go:334] "Generic (PLEG): container finished" podID="7db9c043-f734-4339-8691-8276fc1a459b" containerID="f75e97abe728a6b5fad75371fccc5f6a9765fd82c035056414ebc6e3eb534286" exitCode=0 Oct 08 07:32:43 crc kubenswrapper[4693]: I1008 07:32:43.949501 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-m9dkh" event={"ID":"7db9c043-f734-4339-8691-8276fc1a459b","Type":"ContainerDied","Data":"f75e97abe728a6b5fad75371fccc5f6a9765fd82c035056414ebc6e3eb534286"} Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.835126 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-4860-account-create-j9764"] Oct 08 07:32:44 crc kubenswrapper[4693]: E1008 07:32:44.835583 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89ae7404-8482-48a0-8747-1b4feb15d69d" containerName="mariadb-database-create" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.835606 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ae7404-8482-48a0-8747-1b4feb15d69d" containerName="mariadb-database-create" Oct 08 07:32:44 crc kubenswrapper[4693]: E1008 07:32:44.835624 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b169bf2-af33-4a98-bccc-43de585c357c" containerName="mariadb-database-create" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.835635 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b169bf2-af33-4a98-bccc-43de585c357c" containerName="mariadb-database-create" Oct 08 07:32:44 crc kubenswrapper[4693]: E1008 07:32:44.835656 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10770f18-c298-4a77-a412-4f71ec5edd95" containerName="init" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.835665 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="10770f18-c298-4a77-a412-4f71ec5edd95" containerName="init" Oct 08 07:32:44 crc kubenswrapper[4693]: E1008 07:32:44.835686 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10770f18-c298-4a77-a412-4f71ec5edd95" containerName="dnsmasq-dns" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.835696 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="10770f18-c298-4a77-a412-4f71ec5edd95" containerName="dnsmasq-dns" Oct 08 07:32:44 crc 
kubenswrapper[4693]: E1008 07:32:44.835725 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a1dd78-9d6b-4d47-99c1-4823a2c653c2" containerName="mariadb-database-create" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.835737 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a1dd78-9d6b-4d47-99c1-4823a2c653c2" containerName="mariadb-database-create" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.835977 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="10770f18-c298-4a77-a412-4f71ec5edd95" containerName="dnsmasq-dns" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.835996 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a1dd78-9d6b-4d47-99c1-4823a2c653c2" containerName="mariadb-database-create" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.836018 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b169bf2-af33-4a98-bccc-43de585c357c" containerName="mariadb-database-create" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.836033 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="89ae7404-8482-48a0-8747-1b4feb15d69d" containerName="mariadb-database-create" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.836754 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-4860-account-create-j9764" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.841738 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.865608 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-4860-account-create-j9764"] Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.891429 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l5p7\" (UniqueName: \"kubernetes.io/projected/7ab5a771-9b81-47c8-ab27-83c657676763-kube-api-access-5l5p7\") pod \"glance-4860-account-create-j9764\" (UID: \"7ab5a771-9b81-47c8-ab27-83c657676763\") " pod="openstack/glance-4860-account-create-j9764" Oct 08 07:32:44 crc kubenswrapper[4693]: I1008 07:32:44.993395 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l5p7\" (UniqueName: \"kubernetes.io/projected/7ab5a771-9b81-47c8-ab27-83c657676763-kube-api-access-5l5p7\") pod \"glance-4860-account-create-j9764\" (UID: \"7ab5a771-9b81-47c8-ab27-83c657676763\") " pod="openstack/glance-4860-account-create-j9764" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.017320 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l5p7\" (UniqueName: \"kubernetes.io/projected/7ab5a771-9b81-47c8-ab27-83c657676763-kube-api-access-5l5p7\") pod \"glance-4860-account-create-j9764\" (UID: \"7ab5a771-9b81-47c8-ab27-83c657676763\") " pod="openstack/glance-4860-account-create-j9764" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.056555 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-qmltj" podUID="5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d" containerName="ovn-controller" probeResult="failure" output=< Oct 08 07:32:45 crc kubenswrapper[4693]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 08 07:32:45 crc kubenswrapper[4693]: > Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.157912 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4860-account-create-j9764" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.321283 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.399257 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mn7kk\" (UniqueName: \"kubernetes.io/projected/7db9c043-f734-4339-8691-8276fc1a459b-kube-api-access-mn7kk\") pod \"7db9c043-f734-4339-8691-8276fc1a459b\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.400556 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7db9c043-f734-4339-8691-8276fc1a459b-scripts\") pod \"7db9c043-f734-4339-8691-8276fc1a459b\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.400703 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/7db9c043-f734-4339-8691-8276fc1a459b-etc-swift\") pod \"7db9c043-f734-4339-8691-8276fc1a459b\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.400973 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-dispersionconf\") pod \"7db9c043-f734-4339-8691-8276fc1a459b\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.401075 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/7db9c043-f734-4339-8691-8276fc1a459b-ring-data-devices\") pod \"7db9c043-f734-4339-8691-8276fc1a459b\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.401264 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-combined-ca-bundle\") pod \"7db9c043-f734-4339-8691-8276fc1a459b\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.401410 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-swiftconf\") pod \"7db9c043-f734-4339-8691-8276fc1a459b\" (UID: \"7db9c043-f734-4339-8691-8276fc1a459b\") " Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.401551 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7db9c043-f734-4339-8691-8276fc1a459b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "7db9c043-f734-4339-8691-8276fc1a459b" (UID: "7db9c043-f734-4339-8691-8276fc1a459b"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.401961 4693 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/7db9c043-f734-4339-8691-8276fc1a459b-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.402519 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7db9c043-f734-4339-8691-8276fc1a459b-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "7db9c043-f734-4339-8691-8276fc1a459b" (UID: "7db9c043-f734-4339-8691-8276fc1a459b"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.406054 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7db9c043-f734-4339-8691-8276fc1a459b-kube-api-access-mn7kk" (OuterVolumeSpecName: "kube-api-access-mn7kk") pod "7db9c043-f734-4339-8691-8276fc1a459b" (UID: "7db9c043-f734-4339-8691-8276fc1a459b"). InnerVolumeSpecName "kube-api-access-mn7kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.413059 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "7db9c043-f734-4339-8691-8276fc1a459b" (UID: "7db9c043-f734-4339-8691-8276fc1a459b"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.431507 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7db9c043-f734-4339-8691-8276fc1a459b-scripts" (OuterVolumeSpecName: "scripts") pod "7db9c043-f734-4339-8691-8276fc1a459b" (UID: "7db9c043-f734-4339-8691-8276fc1a459b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.431603 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7db9c043-f734-4339-8691-8276fc1a459b" (UID: "7db9c043-f734-4339-8691-8276fc1a459b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.435986 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "7db9c043-f734-4339-8691-8276fc1a459b" (UID: "7db9c043-f734-4339-8691-8276fc1a459b"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.503898 4693 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/7db9c043-f734-4339-8691-8276fc1a459b-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.503934 4693 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.503948 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.503961 4693 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/7db9c043-f734-4339-8691-8276fc1a459b-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.503974 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mn7kk\" (UniqueName: \"kubernetes.io/projected/7db9c043-f734-4339-8691-8276fc1a459b-kube-api-access-mn7kk\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.503988 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7db9c043-f734-4339-8691-8276fc1a459b-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.683247 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-4860-account-create-j9764"] Oct 08 07:32:45 crc kubenswrapper[4693]: W1008 07:32:45.687535 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ab5a771_9b81_47c8_ab27_83c657676763.slice/crio-b06b420b957e9ef9818c799e58fe3693e79305c52d603b2f1b375a997dc99479 WatchSource:0}: Error finding container b06b420b957e9ef9818c799e58fe3693e79305c52d603b2f1b375a997dc99479: Status 404 returned error can't find the container with id b06b420b957e9ef9818c799e58fe3693e79305c52d603b2f1b375a997dc99479 Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.969128 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-m9dkh" event={"ID":"7db9c043-f734-4339-8691-8276fc1a459b","Type":"ContainerDied","Data":"e16fb4bbbc829711301e616129a4da44c47412b27feab048d65b028dfcf2ab74"} Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.969638 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e16fb4bbbc829711301e616129a4da44c47412b27feab048d65b028dfcf2ab74" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.969205 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-m9dkh" Oct 08 07:32:45 crc kubenswrapper[4693]: I1008 07:32:45.971157 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4860-account-create-j9764" event={"ID":"7ab5a771-9b81-47c8-ab27-83c657676763","Type":"ContainerStarted","Data":"b06b420b957e9ef9818c799e58fe3693e79305c52d603b2f1b375a997dc99479"} Oct 08 07:32:47 crc kubenswrapper[4693]: I1008 07:32:47.987417 4693 generic.go:334] "Generic (PLEG): container finished" podID="7ab5a771-9b81-47c8-ab27-83c657676763" containerID="5de6b01f998c465874b3020e3f3eb0c67270542cb6a99faa4bb331d2de3a0fee" exitCode=0 Oct 08 07:32:47 crc kubenswrapper[4693]: I1008 07:32:47.987580 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4860-account-create-j9764" event={"ID":"7ab5a771-9b81-47c8-ab27-83c657676763","Type":"ContainerDied","Data":"5de6b01f998c465874b3020e3f3eb0c67270542cb6a99faa4bb331d2de3a0fee"} Oct 08 07:32:49 crc kubenswrapper[4693]: I1008 07:32:49.481790 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-4860-account-create-j9764" Oct 08 07:32:49 crc kubenswrapper[4693]: I1008 07:32:49.592991 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5l5p7\" (UniqueName: \"kubernetes.io/projected/7ab5a771-9b81-47c8-ab27-83c657676763-kube-api-access-5l5p7\") pod \"7ab5a771-9b81-47c8-ab27-83c657676763\" (UID: \"7ab5a771-9b81-47c8-ab27-83c657676763\") " Oct 08 07:32:49 crc kubenswrapper[4693]: I1008 07:32:49.600861 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ab5a771-9b81-47c8-ab27-83c657676763-kube-api-access-5l5p7" (OuterVolumeSpecName: "kube-api-access-5l5p7") pod "7ab5a771-9b81-47c8-ab27-83c657676763" (UID: "7ab5a771-9b81-47c8-ab27-83c657676763"). InnerVolumeSpecName "kube-api-access-5l5p7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:32:49 crc kubenswrapper[4693]: I1008 07:32:49.694889 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5l5p7\" (UniqueName: \"kubernetes.io/projected/7ab5a771-9b81-47c8-ab27-83c657676763-kube-api-access-5l5p7\") on node \"crc\" DevicePath \"\"" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.009913 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4860-account-create-j9764" event={"ID":"7ab5a771-9b81-47c8-ab27-83c657676763","Type":"ContainerDied","Data":"b06b420b957e9ef9818c799e58fe3693e79305c52d603b2f1b375a997dc99479"} Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.009990 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4860-account-create-j9764" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.010010 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b06b420b957e9ef9818c799e58fe3693e79305c52d603b2f1b375a997dc99479" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.078574 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-qmltj" podUID="5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d" containerName="ovn-controller" probeResult="failure" output=< Oct 08 07:32:50 crc kubenswrapper[4693]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 08 07:32:50 crc kubenswrapper[4693]: > Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.088792 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.102510 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5mdkq" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.350496 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-qmltj-config-fqcxl"] Oct 08 07:32:50 crc kubenswrapper[4693]: E1008 07:32:50.350975 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ab5a771-9b81-47c8-ab27-83c657676763" containerName="mariadb-account-create" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.351003 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ab5a771-9b81-47c8-ab27-83c657676763" containerName="mariadb-account-create" Oct 08 07:32:50 crc kubenswrapper[4693]: E1008 07:32:50.351063 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db9c043-f734-4339-8691-8276fc1a459b" containerName="swift-ring-rebalance" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.351076 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db9c043-f734-4339-8691-8276fc1a459b" containerName="swift-ring-rebalance" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.351366 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db9c043-f734-4339-8691-8276fc1a459b" containerName="swift-ring-rebalance" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.351405 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ab5a771-9b81-47c8-ab27-83c657676763" containerName="mariadb-account-create" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.352251 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.357355 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.365375 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qmltj-config-fqcxl"] Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.405049 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-run-ovn\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.405153 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67gpb\" (UniqueName: \"kubernetes.io/projected/67801bb8-7e75-48a4-8976-de43d426bc4b-kube-api-access-67gpb\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.405202 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-run\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.405234 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/67801bb8-7e75-48a4-8976-de43d426bc4b-additional-scripts\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.405317 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/67801bb8-7e75-48a4-8976-de43d426bc4b-scripts\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.405368 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-log-ovn\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.506769 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67gpb\" (UniqueName: \"kubernetes.io/projected/67801bb8-7e75-48a4-8976-de43d426bc4b-kube-api-access-67gpb\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.506848 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-run\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.506889 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/67801bb8-7e75-48a4-8976-de43d426bc4b-additional-scripts\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.506971 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/67801bb8-7e75-48a4-8976-de43d426bc4b-scripts\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.507002 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-log-ovn\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.507104 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-run-ovn\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.507323 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-run-ovn\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.507350 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-run\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.507370 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-log-ovn\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.508274 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/67801bb8-7e75-48a4-8976-de43d426bc4b-additional-scripts\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.509863 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/67801bb8-7e75-48a4-8976-de43d426bc4b-scripts\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.533738 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67gpb\" (UniqueName: \"kubernetes.io/projected/67801bb8-7e75-48a4-8976-de43d426bc4b-kube-api-access-67gpb\") pod \"ovn-controller-qmltj-config-fqcxl\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") " pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:50 crc kubenswrapper[4693]: I1008 07:32:50.743001 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qmltj-config-fqcxl" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.149123 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.265985 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qmltj-config-fqcxl"] Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.496050 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.526201 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-bg956"] Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.527138 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bg956" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.595894 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-bg956"] Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.627857 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qj2df\" (UniqueName: \"kubernetes.io/projected/4e7ed96c-26ab-4895-bcc8-35bba61e5240-kube-api-access-qj2df\") pod \"cinder-db-create-bg956\" (UID: \"4e7ed96c-26ab-4895-bcc8-35bba61e5240\") " pod="openstack/cinder-db-create-bg956" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.644358 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-5xhnc"] Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.646064 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-5xhnc" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.649987 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-5xhnc"] Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.729168 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68clw\" (UniqueName: \"kubernetes.io/projected/68d99e15-182a-46b3-8478-d0f2b3763662-kube-api-access-68clw\") pod \"barbican-db-create-5xhnc\" (UID: \"68d99e15-182a-46b3-8478-d0f2b3763662\") " pod="openstack/barbican-db-create-5xhnc" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.729237 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qj2df\" (UniqueName: \"kubernetes.io/projected/4e7ed96c-26ab-4895-bcc8-35bba61e5240-kube-api-access-qj2df\") pod \"cinder-db-create-bg956\" (UID: \"4e7ed96c-26ab-4895-bcc8-35bba61e5240\") " pod="openstack/cinder-db-create-bg956" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.749434 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qj2df\" (UniqueName: \"kubernetes.io/projected/4e7ed96c-26ab-4895-bcc8-35bba61e5240-kube-api-access-qj2df\") pod \"cinder-db-create-bg956\" (UID: \"4e7ed96c-26ab-4895-bcc8-35bba61e5240\") " pod="openstack/cinder-db-create-bg956" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.818018 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-w6cgs"] Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.819740 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-w6cgs" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.831370 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68clw\" (UniqueName: \"kubernetes.io/projected/68d99e15-182a-46b3-8478-d0f2b3763662-kube-api-access-68clw\") pod \"barbican-db-create-5xhnc\" (UID: \"68d99e15-182a-46b3-8478-d0f2b3763662\") " pod="openstack/barbican-db-create-5xhnc" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.831496 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clkgn\" (UniqueName: \"kubernetes.io/projected/4a70d58e-b552-4ff1-a632-47debfe9ffbf-kube-api-access-clkgn\") pod \"neutron-db-create-w6cgs\" (UID: \"4a70d58e-b552-4ff1-a632-47debfe9ffbf\") " pod="openstack/neutron-db-create-w6cgs" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.834035 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-w6cgs"] Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.858973 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68clw\" (UniqueName: \"kubernetes.io/projected/68d99e15-182a-46b3-8478-d0f2b3763662-kube-api-access-68clw\") pod \"barbican-db-create-5xhnc\" (UID: \"68d99e15-182a-46b3-8478-d0f2b3763662\") " pod="openstack/barbican-db-create-5xhnc" Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.872853 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-bg956"
Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.932349 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clkgn\" (UniqueName: \"kubernetes.io/projected/4a70d58e-b552-4ff1-a632-47debfe9ffbf-kube-api-access-clkgn\") pod \"neutron-db-create-w6cgs\" (UID: \"4a70d58e-b552-4ff1-a632-47debfe9ffbf\") " pod="openstack/neutron-db-create-w6cgs"
Oct 08 07:32:51 crc kubenswrapper[4693]: I1008 07:32:51.949115 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clkgn\" (UniqueName: \"kubernetes.io/projected/4a70d58e-b552-4ff1-a632-47debfe9ffbf-kube-api-access-clkgn\") pod \"neutron-db-create-w6cgs\" (UID: \"4a70d58e-b552-4ff1-a632-47debfe9ffbf\") " pod="openstack/neutron-db-create-w6cgs"
Oct 08 07:32:52 crc kubenswrapper[4693]: I1008 07:32:51.994527 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-5xhnc"
Oct 08 07:32:52 crc kubenswrapper[4693]: I1008 07:32:52.032597 4693 generic.go:334] "Generic (PLEG): container finished" podID="67801bb8-7e75-48a4-8976-de43d426bc4b" containerID="492a0c81aaf74283b777935b1656d1faba8cac2fd2764d59a1b4d8c28a159cb3" exitCode=0
Oct 08 07:32:52 crc kubenswrapper[4693]: I1008 07:32:52.032637 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qmltj-config-fqcxl" event={"ID":"67801bb8-7e75-48a4-8976-de43d426bc4b","Type":"ContainerDied","Data":"492a0c81aaf74283b777935b1656d1faba8cac2fd2764d59a1b4d8c28a159cb3"}
Oct 08 07:32:52 crc kubenswrapper[4693]: I1008 07:32:52.032662 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qmltj-config-fqcxl" event={"ID":"67801bb8-7e75-48a4-8976-de43d426bc4b","Type":"ContainerStarted","Data":"d939ef11146b377b1ebd7faaad22b413fc6ccb9b00f50f49d6b2a5b26d2ce98c"}
Oct 08 07:32:52 crc kubenswrapper[4693]: I1008 07:32:52.135772 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-w6cgs"
Oct 08 07:32:52 crc kubenswrapper[4693]: I1008 07:32:52.310715 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-bg956"]
Oct 08 07:32:52 crc kubenswrapper[4693]: W1008 07:32:52.313548 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e7ed96c_26ab_4895_bcc8_35bba61e5240.slice/crio-80c07d3ce54b37713c3f484d759db9fee0bd8551ecbd8361ac2198eb19e110d2 WatchSource:0}: Error finding container 80c07d3ce54b37713c3f484d759db9fee0bd8551ecbd8361ac2198eb19e110d2: Status 404 returned error can't find the container with id 80c07d3ce54b37713c3f484d759db9fee0bd8551ecbd8361ac2198eb19e110d2
Oct 08 07:32:52 crc kubenswrapper[4693]: I1008 07:32:52.354956 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-w6cgs"]
Oct 08 07:32:52 crc kubenswrapper[4693]: W1008 07:32:52.365482 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a70d58e_b552_4ff1_a632_47debfe9ffbf.slice/crio-dc6c2371b8c027cf8b197594a1e97c4bbdb5325bf69ea2103fba198a45d55610 WatchSource:0}: Error finding container dc6c2371b8c027cf8b197594a1e97c4bbdb5325bf69ea2103fba198a45d55610: Status 404 returned error can't find the container with id dc6c2371b8c027cf8b197594a1e97c4bbdb5325bf69ea2103fba198a45d55610
Oct 08 07:32:52 crc kubenswrapper[4693]: I1008 07:32:52.438572 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-5xhnc"]
Oct 08 07:32:52 crc kubenswrapper[4693]: W1008 07:32:52.442125 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68d99e15_182a_46b3_8478_d0f2b3763662.slice/crio-276ff08228fed62bc238957704f75822f36466b31c44639e66aca93e23dcdf2c WatchSource:0}: Error finding container 276ff08228fed62bc238957704f75822f36466b31c44639e66aca93e23dcdf2c: Status 404 returned error can't find the container with id 276ff08228fed62bc238957704f75822f36466b31c44639e66aca93e23dcdf2c
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.041410 4693 generic.go:334] "Generic (PLEG): container finished" podID="4a70d58e-b552-4ff1-a632-47debfe9ffbf" containerID="e49d78069afa8794b626d0768b44883b7157b83915c4533193528f2f87e0f5ba" exitCode=0
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.041507 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-w6cgs" event={"ID":"4a70d58e-b552-4ff1-a632-47debfe9ffbf","Type":"ContainerDied","Data":"e49d78069afa8794b626d0768b44883b7157b83915c4533193528f2f87e0f5ba"}
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.041851 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-w6cgs" event={"ID":"4a70d58e-b552-4ff1-a632-47debfe9ffbf","Type":"ContainerStarted","Data":"dc6c2371b8c027cf8b197594a1e97c4bbdb5325bf69ea2103fba198a45d55610"}
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.045686 4693 generic.go:334] "Generic (PLEG): container finished" podID="68d99e15-182a-46b3-8478-d0f2b3763662" containerID="a886b1226d70e20ec5e77d3c402549df1a2160889c6107fdd07ee8e27263ef58" exitCode=0
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.045787 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-5xhnc" event={"ID":"68d99e15-182a-46b3-8478-d0f2b3763662","Type":"ContainerDied","Data":"a886b1226d70e20ec5e77d3c402549df1a2160889c6107fdd07ee8e27263ef58"}
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.045803 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-5xhnc" event={"ID":"68d99e15-182a-46b3-8478-d0f2b3763662","Type":"ContainerStarted","Data":"276ff08228fed62bc238957704f75822f36466b31c44639e66aca93e23dcdf2c"}
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.048324 4693 generic.go:334] "Generic (PLEG): container finished" podID="4e7ed96c-26ab-4895-bcc8-35bba61e5240" containerID="6a3db8abcfcbbf5f365392873d0f2e33bc7484daa8eb91238b763a8e43385000" exitCode=0
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.048359 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bg956" event={"ID":"4e7ed96c-26ab-4895-bcc8-35bba61e5240","Type":"ContainerDied","Data":"6a3db8abcfcbbf5f365392873d0f2e33bc7484daa8eb91238b763a8e43385000"}
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.048432 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bg956" event={"ID":"4e7ed96c-26ab-4895-bcc8-35bba61e5240","Type":"ContainerStarted","Data":"80c07d3ce54b37713c3f484d759db9fee0bd8551ecbd8361ac2198eb19e110d2"}
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.387696 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qmltj-config-fqcxl"
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.559059 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/67801bb8-7e75-48a4-8976-de43d426bc4b-scripts\") pod \"67801bb8-7e75-48a4-8976-de43d426bc4b\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") "
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.559164 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/67801bb8-7e75-48a4-8976-de43d426bc4b-additional-scripts\") pod \"67801bb8-7e75-48a4-8976-de43d426bc4b\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") "
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.559202 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-run\") pod \"67801bb8-7e75-48a4-8976-de43d426bc4b\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") "
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.559367 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67gpb\" (UniqueName: \"kubernetes.io/projected/67801bb8-7e75-48a4-8976-de43d426bc4b-kube-api-access-67gpb\") pod \"67801bb8-7e75-48a4-8976-de43d426bc4b\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") "
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.559372 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-run" (OuterVolumeSpecName: "var-run") pod "67801bb8-7e75-48a4-8976-de43d426bc4b" (UID: "67801bb8-7e75-48a4-8976-de43d426bc4b"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.559397 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-run-ovn\") pod \"67801bb8-7e75-48a4-8976-de43d426bc4b\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") "
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.559444 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "67801bb8-7e75-48a4-8976-de43d426bc4b" (UID: "67801bb8-7e75-48a4-8976-de43d426bc4b"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.559456 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-log-ovn\") pod \"67801bb8-7e75-48a4-8976-de43d426bc4b\" (UID: \"67801bb8-7e75-48a4-8976-de43d426bc4b\") "
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.559531 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "67801bb8-7e75-48a4-8976-de43d426bc4b" (UID: "67801bb8-7e75-48a4-8976-de43d426bc4b"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.559909 4693 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-run\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.559932 4693 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-run-ovn\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.559943 4693 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/67801bb8-7e75-48a4-8976-de43d426bc4b-var-log-ovn\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.560028 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67801bb8-7e75-48a4-8976-de43d426bc4b-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "67801bb8-7e75-48a4-8976-de43d426bc4b" (UID: "67801bb8-7e75-48a4-8976-de43d426bc4b"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.560345 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67801bb8-7e75-48a4-8976-de43d426bc4b-scripts" (OuterVolumeSpecName: "scripts") pod "67801bb8-7e75-48a4-8976-de43d426bc4b" (UID: "67801bb8-7e75-48a4-8976-de43d426bc4b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.565788 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67801bb8-7e75-48a4-8976-de43d426bc4b-kube-api-access-67gpb" (OuterVolumeSpecName: "kube-api-access-67gpb") pod "67801bb8-7e75-48a4-8976-de43d426bc4b" (UID: "67801bb8-7e75-48a4-8976-de43d426bc4b"). InnerVolumeSpecName "kube-api-access-67gpb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.661331 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67gpb\" (UniqueName: \"kubernetes.io/projected/67801bb8-7e75-48a4-8976-de43d426bc4b-kube-api-access-67gpb\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.661964 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/67801bb8-7e75-48a4-8976-de43d426bc4b-scripts\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:53 crc kubenswrapper[4693]: I1008 07:32:53.662055 4693 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/67801bb8-7e75-48a4-8976-de43d426bc4b-additional-scripts\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.059313 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qmltj-config-fqcxl"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.059342 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qmltj-config-fqcxl" event={"ID":"67801bb8-7e75-48a4-8976-de43d426bc4b","Type":"ContainerDied","Data":"d939ef11146b377b1ebd7faaad22b413fc6ccb9b00f50f49d6b2a5b26d2ce98c"}
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.059403 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d939ef11146b377b1ebd7faaad22b413fc6ccb9b00f50f49d6b2a5b26d2ce98c"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.219623 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-003f-account-create-s8v4s"]
Oct 08 07:32:54 crc kubenswrapper[4693]: E1008 07:32:54.220174 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67801bb8-7e75-48a4-8976-de43d426bc4b" containerName="ovn-config"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.220201 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="67801bb8-7e75-48a4-8976-de43d426bc4b" containerName="ovn-config"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.220408 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="67801bb8-7e75-48a4-8976-de43d426bc4b" containerName="ovn-config"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.221101 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-003f-account-create-s8v4s"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.223957 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.229691 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-003f-account-create-s8v4s"]
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.273349 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvjd8\" (UniqueName: \"kubernetes.io/projected/1b0a8408-551b-4cbc-996c-9ddc40c25642-kube-api-access-pvjd8\") pod \"keystone-003f-account-create-s8v4s\" (UID: \"1b0a8408-551b-4cbc-996c-9ddc40c25642\") " pod="openstack/keystone-003f-account-create-s8v4s"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.375399 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvjd8\" (UniqueName: \"kubernetes.io/projected/1b0a8408-551b-4cbc-996c-9ddc40c25642-kube-api-access-pvjd8\") pod \"keystone-003f-account-create-s8v4s\" (UID: \"1b0a8408-551b-4cbc-996c-9ddc40c25642\") " pod="openstack/keystone-003f-account-create-s8v4s"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.391483 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvjd8\" (UniqueName: \"kubernetes.io/projected/1b0a8408-551b-4cbc-996c-9ddc40c25642-kube-api-access-pvjd8\") pod \"keystone-003f-account-create-s8v4s\" (UID: \"1b0a8408-551b-4cbc-996c-9ddc40c25642\") " pod="openstack/keystone-003f-account-create-s8v4s"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.478052 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-qmltj-config-fqcxl"]
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.501788 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-qmltj-config-fqcxl"]
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.522284 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-w6cgs"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.527258 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bg956"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.545557 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-003f-account-create-s8v4s"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.553900 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-5xhnc"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.568014 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-e9f5-account-create-4vwfk"]
Oct 08 07:32:54 crc kubenswrapper[4693]: E1008 07:32:54.570367 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7ed96c-26ab-4895-bcc8-35bba61e5240" containerName="mariadb-database-create"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.570500 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7ed96c-26ab-4895-bcc8-35bba61e5240" containerName="mariadb-database-create"
Oct 08 07:32:54 crc kubenswrapper[4693]: E1008 07:32:54.570531 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68d99e15-182a-46b3-8478-d0f2b3763662" containerName="mariadb-database-create"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.570542 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="68d99e15-182a-46b3-8478-d0f2b3763662" containerName="mariadb-database-create"
Oct 08 07:32:54 crc kubenswrapper[4693]: E1008 07:32:54.570746 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a70d58e-b552-4ff1-a632-47debfe9ffbf" containerName="mariadb-database-create"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.570766 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a70d58e-b552-4ff1-a632-47debfe9ffbf" containerName="mariadb-database-create"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.572124 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a70d58e-b552-4ff1-a632-47debfe9ffbf" containerName="mariadb-database-create"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.572169 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e7ed96c-26ab-4895-bcc8-35bba61e5240" containerName="mariadb-database-create"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.572188 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="68d99e15-182a-46b3-8478-d0f2b3763662" containerName="mariadb-database-create"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.573416 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e9f5-account-create-4vwfk"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.578401 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clkgn\" (UniqueName: \"kubernetes.io/projected/4a70d58e-b552-4ff1-a632-47debfe9ffbf-kube-api-access-clkgn\") pod \"4a70d58e-b552-4ff1-a632-47debfe9ffbf\" (UID: \"4a70d58e-b552-4ff1-a632-47debfe9ffbf\") "
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.578749 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk7dz\" (UniqueName: \"kubernetes.io/projected/b56ccec5-1b44-4461-8d85-45b35e108b39-kube-api-access-zk7dz\") pod \"placement-e9f5-account-create-4vwfk\" (UID: \"b56ccec5-1b44-4461-8d85-45b35e108b39\") " pod="openstack/placement-e9f5-account-create-4vwfk"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.579264 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.580916 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e9f5-account-create-4vwfk"]
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.588023 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a70d58e-b552-4ff1-a632-47debfe9ffbf-kube-api-access-clkgn" (OuterVolumeSpecName: "kube-api-access-clkgn") pod "4a70d58e-b552-4ff1-a632-47debfe9ffbf" (UID: "4a70d58e-b552-4ff1-a632-47debfe9ffbf"). InnerVolumeSpecName "kube-api-access-clkgn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.680920 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68clw\" (UniqueName: \"kubernetes.io/projected/68d99e15-182a-46b3-8478-d0f2b3763662-kube-api-access-68clw\") pod \"68d99e15-182a-46b3-8478-d0f2b3763662\" (UID: \"68d99e15-182a-46b3-8478-d0f2b3763662\") "
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.680998 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qj2df\" (UniqueName: \"kubernetes.io/projected/4e7ed96c-26ab-4895-bcc8-35bba61e5240-kube-api-access-qj2df\") pod \"4e7ed96c-26ab-4895-bcc8-35bba61e5240\" (UID: \"4e7ed96c-26ab-4895-bcc8-35bba61e5240\") "
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.681176 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk7dz\" (UniqueName: \"kubernetes.io/projected/b56ccec5-1b44-4461-8d85-45b35e108b39-kube-api-access-zk7dz\") pod \"placement-e9f5-account-create-4vwfk\" (UID: \"b56ccec5-1b44-4461-8d85-45b35e108b39\") " pod="openstack/placement-e9f5-account-create-4vwfk"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.681240 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clkgn\" (UniqueName: \"kubernetes.io/projected/4a70d58e-b552-4ff1-a632-47debfe9ffbf-kube-api-access-clkgn\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.685754 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e7ed96c-26ab-4895-bcc8-35bba61e5240-kube-api-access-qj2df" (OuterVolumeSpecName: "kube-api-access-qj2df") pod "4e7ed96c-26ab-4895-bcc8-35bba61e5240" (UID: "4e7ed96c-26ab-4895-bcc8-35bba61e5240"). InnerVolumeSpecName "kube-api-access-qj2df". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.685987 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68d99e15-182a-46b3-8478-d0f2b3763662-kube-api-access-68clw" (OuterVolumeSpecName: "kube-api-access-68clw") pod "68d99e15-182a-46b3-8478-d0f2b3763662" (UID: "68d99e15-182a-46b3-8478-d0f2b3763662"). InnerVolumeSpecName "kube-api-access-68clw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.700585 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk7dz\" (UniqueName: \"kubernetes.io/projected/b56ccec5-1b44-4461-8d85-45b35e108b39-kube-api-access-zk7dz\") pod \"placement-e9f5-account-create-4vwfk\" (UID: \"b56ccec5-1b44-4461-8d85-45b35e108b39\") " pod="openstack/placement-e9f5-account-create-4vwfk"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.782208 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68clw\" (UniqueName: \"kubernetes.io/projected/68d99e15-182a-46b3-8478-d0f2b3763662-kube-api-access-68clw\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.782237 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qj2df\" (UniqueName: \"kubernetes.io/projected/4e7ed96c-26ab-4895-bcc8-35bba61e5240-kube-api-access-qj2df\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.900861 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-k4zk2"]
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.902054 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.904406 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-z69ht"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.905201 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.911141 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-k4zk2"]
Oct 08 07:32:54 crc kubenswrapper[4693]: I1008 07:32:54.922885 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e9f5-account-create-4vwfk"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.009559 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-003f-account-create-s8v4s"]
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.073845 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-qmltj"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.080694 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-003f-account-create-s8v4s" event={"ID":"1b0a8408-551b-4cbc-996c-9ddc40c25642","Type":"ContainerStarted","Data":"332ddd5b65c599a0fbc254a5cc513a902d1f37ca8231af7188ba47e0e60e2880"}
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.084844 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-w6cgs" event={"ID":"4a70d58e-b552-4ff1-a632-47debfe9ffbf","Type":"ContainerDied","Data":"dc6c2371b8c027cf8b197594a1e97c4bbdb5325bf69ea2103fba198a45d55610"}
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.084875 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc6c2371b8c027cf8b197594a1e97c4bbdb5325bf69ea2103fba198a45d55610"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.084939 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-w6cgs"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.087219 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-combined-ca-bundle\") pod \"glance-db-sync-k4zk2\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.087302 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwqmc\" (UniqueName: \"kubernetes.io/projected/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-kube-api-access-lwqmc\") pod \"glance-db-sync-k4zk2\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.088129 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-config-data\") pod \"glance-db-sync-k4zk2\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.088291 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-db-sync-config-data\") pod \"glance-db-sync-k4zk2\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.092626 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-5xhnc" event={"ID":"68d99e15-182a-46b3-8478-d0f2b3763662","Type":"ContainerDied","Data":"276ff08228fed62bc238957704f75822f36466b31c44639e66aca93e23dcdf2c"}
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.092658 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="276ff08228fed62bc238957704f75822f36466b31c44639e66aca93e23dcdf2c"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.092716 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-5xhnc"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.096554 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bg956" event={"ID":"4e7ed96c-26ab-4895-bcc8-35bba61e5240","Type":"ContainerDied","Data":"80c07d3ce54b37713c3f484d759db9fee0bd8551ecbd8361ac2198eb19e110d2"}
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.096589 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80c07d3ce54b37713c3f484d759db9fee0bd8551ecbd8361ac2198eb19e110d2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.096643 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bg956"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.189892 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-combined-ca-bundle\") pod \"glance-db-sync-k4zk2\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.189989 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwqmc\" (UniqueName: \"kubernetes.io/projected/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-kube-api-access-lwqmc\") pod \"glance-db-sync-k4zk2\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.190054 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-config-data\") pod \"glance-db-sync-k4zk2\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.190116 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-db-sync-config-data\") pod \"glance-db-sync-k4zk2\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.194395 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-db-sync-config-data\") pod \"glance-db-sync-k4zk2\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.194995 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-combined-ca-bundle\") pod \"glance-db-sync-k4zk2\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.199896 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-config-data\") pod \"glance-db-sync-k4zk2\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.214051 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwqmc\" (UniqueName: \"kubernetes.io/projected/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-kube-api-access-lwqmc\") pod \"glance-db-sync-k4zk2\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.218758 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-k4zk2"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.398744 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67801bb8-7e75-48a4-8976-de43d426bc4b" path="/var/lib/kubelet/pods/67801bb8-7e75-48a4-8976-de43d426bc4b/volumes"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.413669 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e9f5-account-create-4vwfk"]
Oct 08 07:32:55 crc kubenswrapper[4693]: W1008 07:32:55.433057 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb56ccec5_1b44_4461_8d85_45b35e108b39.slice/crio-5ac4e0469cfd90c3249d7b863cbbcf85322409c5837b7949a99e7909d6b19f8a WatchSource:0}: Error finding container 5ac4e0469cfd90c3249d7b863cbbcf85322409c5837b7949a99e7909d6b19f8a: Status 404 returned error can't find the container with id 5ac4e0469cfd90c3249d7b863cbbcf85322409c5837b7949a99e7909d6b19f8a
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.443186 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Oct 08 07:32:55 crc kubenswrapper[4693]: I1008 07:32:55.722926 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-k4zk2"]
Oct 08 07:32:56 crc kubenswrapper[4693]: I1008 07:32:56.105687 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-k4zk2" event={"ID":"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4","Type":"ContainerStarted","Data":"caa9de3f79a7d4e7bce53affaa7c1d7f0d87a0b5115e6e0c3a13db523b313be7"}
Oct 08 07:32:56 crc kubenswrapper[4693]: I1008 07:32:56.109143 4693 generic.go:334] "Generic (PLEG): container finished" podID="1b0a8408-551b-4cbc-996c-9ddc40c25642" containerID="58d65e6065311e9a0ef56636ed6d2a5e0d289209a91bc6b2270cf4eaf6b31f73" exitCode=0
Oct 08 07:32:56 crc kubenswrapper[4693]: I1008 07:32:56.109393 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-003f-account-create-s8v4s" event={"ID":"1b0a8408-551b-4cbc-996c-9ddc40c25642","Type":"ContainerDied","Data":"58d65e6065311e9a0ef56636ed6d2a5e0d289209a91bc6b2270cf4eaf6b31f73"}
Oct 08 07:32:56 crc kubenswrapper[4693]: I1008 07:32:56.111330 4693 generic.go:334] "Generic (PLEG): container finished" podID="b56ccec5-1b44-4461-8d85-45b35e108b39" containerID="5ff9d3b88c4c586ef50657ee69e1fc6c5d2b9f4a2b646ca4399c5c2378791e44" exitCode=0
Oct 08 07:32:56 crc kubenswrapper[4693]: I1008 07:32:56.111395 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e9f5-account-create-4vwfk" event={"ID":"b56ccec5-1b44-4461-8d85-45b35e108b39","Type":"ContainerDied","Data":"5ff9d3b88c4c586ef50657ee69e1fc6c5d2b9f4a2b646ca4399c5c2378791e44"}
Oct 08 07:32:56 crc kubenswrapper[4693]: I1008 07:32:56.111435 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e9f5-account-create-4vwfk" event={"ID":"b56ccec5-1b44-4461-8d85-45b35e108b39","Type":"ContainerStarted","Data":"5ac4e0469cfd90c3249d7b863cbbcf85322409c5837b7949a99e7909d6b19f8a"}
Oct 08 07:32:57 crc kubenswrapper[4693]: I1008 07:32:57.462188 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e9f5-account-create-4vwfk"
Oct 08 07:32:57 crc kubenswrapper[4693]: I1008 07:32:57.467557 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-003f-account-create-s8v4s"
Oct 08 07:32:57 crc kubenswrapper[4693]: I1008 07:32:57.551191 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvjd8\" (UniqueName: \"kubernetes.io/projected/1b0a8408-551b-4cbc-996c-9ddc40c25642-kube-api-access-pvjd8\") pod \"1b0a8408-551b-4cbc-996c-9ddc40c25642\" (UID: \"1b0a8408-551b-4cbc-996c-9ddc40c25642\") "
Oct 08 07:32:57 crc kubenswrapper[4693]: I1008 07:32:57.551265 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zk7dz\" (UniqueName: \"kubernetes.io/projected/b56ccec5-1b44-4461-8d85-45b35e108b39-kube-api-access-zk7dz\") pod \"b56ccec5-1b44-4461-8d85-45b35e108b39\" (UID: \"b56ccec5-1b44-4461-8d85-45b35e108b39\") "
Oct 08 07:32:57 crc kubenswrapper[4693]: I1008 07:32:57.558919 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b56ccec5-1b44-4461-8d85-45b35e108b39-kube-api-access-zk7dz" (OuterVolumeSpecName: "kube-api-access-zk7dz") pod "b56ccec5-1b44-4461-8d85-45b35e108b39" (UID: "b56ccec5-1b44-4461-8d85-45b35e108b39"). InnerVolumeSpecName "kube-api-access-zk7dz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:32:57 crc kubenswrapper[4693]: I1008 07:32:57.559297 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b0a8408-551b-4cbc-996c-9ddc40c25642-kube-api-access-pvjd8" (OuterVolumeSpecName: "kube-api-access-pvjd8") pod "1b0a8408-551b-4cbc-996c-9ddc40c25642" (UID: "1b0a8408-551b-4cbc-996c-9ddc40c25642"). InnerVolumeSpecName "kube-api-access-pvjd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:32:57 crc kubenswrapper[4693]: I1008 07:32:57.653177 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zk7dz\" (UniqueName: \"kubernetes.io/projected/b56ccec5-1b44-4461-8d85-45b35e108b39-kube-api-access-zk7dz\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:57 crc kubenswrapper[4693]: I1008 07:32:57.653221 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvjd8\" (UniqueName: \"kubernetes.io/projected/1b0a8408-551b-4cbc-996c-9ddc40c25642-kube-api-access-pvjd8\") on node \"crc\" DevicePath \"\""
Oct 08 07:32:58 crc kubenswrapper[4693]: I1008 07:32:58.130306 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e9f5-account-create-4vwfk"
Oct 08 07:32:58 crc kubenswrapper[4693]: I1008 07:32:58.130354 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e9f5-account-create-4vwfk" event={"ID":"b56ccec5-1b44-4461-8d85-45b35e108b39","Type":"ContainerDied","Data":"5ac4e0469cfd90c3249d7b863cbbcf85322409c5837b7949a99e7909d6b19f8a"}
Oct 08 07:32:58 crc kubenswrapper[4693]: I1008 07:32:58.130731 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ac4e0469cfd90c3249d7b863cbbcf85322409c5837b7949a99e7909d6b19f8a"
Oct 08 07:32:58 crc kubenswrapper[4693]: I1008 07:32:58.132360 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-003f-account-create-s8v4s" event={"ID":"1b0a8408-551b-4cbc-996c-9ddc40c25642","Type":"ContainerDied","Data":"332ddd5b65c599a0fbc254a5cc513a902d1f37ca8231af7188ba47e0e60e2880"}
Oct 08 07:32:58 crc kubenswrapper[4693]: I1008 07:32:58.132391 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="332ddd5b65c599a0fbc254a5cc513a902d1f37ca8231af7188ba47e0e60e2880"
Oct 08 07:32:58 crc kubenswrapper[4693]: I1008 07:32:58.132449 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-003f-account-create-s8v4s"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.278651 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.285722 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/298a15e0-992f-4f83-8067-7e8e6aa47b89-etc-swift\") pod \"swift-storage-0\" (UID: \"298a15e0-992f-4f83-8067-7e8e6aa47b89\") " pod="openstack/swift-storage-0"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.488171 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.797726 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-p8hg9"]
Oct 08 07:32:59 crc kubenswrapper[4693]: E1008 07:32:59.798212 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b56ccec5-1b44-4461-8d85-45b35e108b39" containerName="mariadb-account-create"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.798229 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b56ccec5-1b44-4461-8d85-45b35e108b39" containerName="mariadb-account-create"
Oct 08 07:32:59 crc kubenswrapper[4693]: E1008 07:32:59.798247 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b0a8408-551b-4cbc-996c-9ddc40c25642" containerName="mariadb-account-create"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.798253 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b0a8408-551b-4cbc-996c-9ddc40c25642" containerName="mariadb-account-create"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.798430 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b56ccec5-1b44-4461-8d85-45b35e108b39" containerName="mariadb-account-create"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.798454 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b0a8408-551b-4cbc-996c-9ddc40c25642" containerName="mariadb-account-create"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.799079 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-p8hg9"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.802061 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.802950 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-nd6nt"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.803619 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.803741 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.804546 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-p8hg9"]
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.889484 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ef4c47-e897-4afe-839d-31e54512a16b-combined-ca-bundle\") pod \"keystone-db-sync-p8hg9\" (UID: \"15ef4c47-e897-4afe-839d-31e54512a16b\") " pod="openstack/keystone-db-sync-p8hg9"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.889592 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkjtc\" (UniqueName: \"kubernetes.io/projected/15ef4c47-e897-4afe-839d-31e54512a16b-kube-api-access-nkjtc\") pod \"keystone-db-sync-p8hg9\" (UID: \"15ef4c47-e897-4afe-839d-31e54512a16b\") " pod="openstack/keystone-db-sync-p8hg9"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.889620 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15ef4c47-e897-4afe-839d-31e54512a16b-config-data\") pod \"keystone-db-sync-p8hg9\" (UID: \"15ef4c47-e897-4afe-839d-31e54512a16b\") " pod="openstack/keystone-db-sync-p8hg9"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.991274 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkjtc\" (UniqueName: \"kubernetes.io/projected/15ef4c47-e897-4afe-839d-31e54512a16b-kube-api-access-nkjtc\") pod \"keystone-db-sync-p8hg9\" (UID: \"15ef4c47-e897-4afe-839d-31e54512a16b\") " pod="openstack/keystone-db-sync-p8hg9"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.991319 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15ef4c47-e897-4afe-839d-31e54512a16b-config-data\") pod \"keystone-db-sync-p8hg9\" (UID: \"15ef4c47-e897-4afe-839d-31e54512a16b\") " pod="openstack/keystone-db-sync-p8hg9"
Oct 08 07:32:59 crc kubenswrapper[4693]: I1008 07:32:59.991387 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ef4c47-e897-4afe-839d-31e54512a16b-combined-ca-bundle\") pod \"keystone-db-sync-p8hg9\" (UID: \"15ef4c47-e897-4afe-839d-31e54512a16b\") " pod="openstack/keystone-db-sync-p8hg9"
Oct 08 07:33:00 crc kubenswrapper[4693]: I1008 07:32:59.999905 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15ef4c47-e897-4afe-839d-31e54512a16b-config-data\") pod \"keystone-db-sync-p8hg9\" (UID: \"15ef4c47-e897-4afe-839d-31e54512a16b\") " pod="openstack/keystone-db-sync-p8hg9"
Oct 08 07:33:00 crc kubenswrapper[4693]: I1008 07:33:00.015598 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkjtc\" (UniqueName: \"kubernetes.io/projected/15ef4c47-e897-4afe-839d-31e54512a16b-kube-api-access-nkjtc\") pod \"keystone-db-sync-p8hg9\" (UID: \"15ef4c47-e897-4afe-839d-31e54512a16b\") " pod="openstack/keystone-db-sync-p8hg9"
Oct 08 07:33:00 crc kubenswrapper[4693]: I1008 07:33:00.015704 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ef4c47-e897-4afe-839d-31e54512a16b-combined-ca-bundle\") pod \"keystone-db-sync-p8hg9\" (UID: \"15ef4c47-e897-4afe-839d-31e54512a16b\") " pod="openstack/keystone-db-sync-p8hg9"
Oct 08 07:33:00 crc kubenswrapper[4693]: I1008 07:33:00.085333 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Oct 08 07:33:00 crc kubenswrapper[4693]: W1008 07:33:00.095770 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod298a15e0_992f_4f83_8067_7e8e6aa47b89.slice/crio-8f782808472046fa36bc0f16d7a531453cb702d371c09e8042827adc8904bade WatchSource:0}: Error finding container 8f782808472046fa36bc0f16d7a531453cb702d371c09e8042827adc8904bade: Status 404 returned error can't find the container with id 8f782808472046fa36bc0f16d7a531453cb702d371c09e8042827adc8904bade
Oct 08 07:33:00 crc kubenswrapper[4693]: I1008 07:33:00.132325 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-p8hg9"
Oct 08 07:33:00 crc kubenswrapper[4693]: I1008 07:33:00.150033 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"8f782808472046fa36bc0f16d7a531453cb702d371c09e8042827adc8904bade"}
Oct 08 07:33:00 crc kubenswrapper[4693]: I1008 07:33:00.542338 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-p8hg9"]
Oct 08 07:33:00 crc kubenswrapper[4693]: W1008 07:33:00.552008 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15ef4c47_e897_4afe_839d_31e54512a16b.slice/crio-fcb3211833a0f8965834bd1da6b97bd01da12010171eb4fbb0a128f207dfb9b1 WatchSource:0}: Error finding container fcb3211833a0f8965834bd1da6b97bd01da12010171eb4fbb0a128f207dfb9b1: Status 404 returned error can't find the container with id fcb3211833a0f8965834bd1da6b97bd01da12010171eb4fbb0a128f207dfb9b1
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.159699 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-p8hg9" event={"ID":"15ef4c47-e897-4afe-839d-31e54512a16b","Type":"ContainerStarted","Data":"fcb3211833a0f8965834bd1da6b97bd01da12010171eb4fbb0a128f207dfb9b1"}
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.558463 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-b816-account-create-vwpn5"]
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.559979 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-b816-account-create-vwpn5"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.564974 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.579224 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-b816-account-create-vwpn5"]
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.619119 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcczw\" (UniqueName: \"kubernetes.io/projected/755f3d62-7b37-470b-94ae-9e9c6a7df2d1-kube-api-access-dcczw\") pod \"cinder-b816-account-create-vwpn5\" (UID: \"755f3d62-7b37-470b-94ae-9e9c6a7df2d1\") " pod="openstack/cinder-b816-account-create-vwpn5"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.721321 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcczw\" (UniqueName: \"kubernetes.io/projected/755f3d62-7b37-470b-94ae-9e9c6a7df2d1-kube-api-access-dcczw\") pod \"cinder-b816-account-create-vwpn5\" (UID: \"755f3d62-7b37-470b-94ae-9e9c6a7df2d1\") " pod="openstack/cinder-b816-account-create-vwpn5"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.757703 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcczw\" (UniqueName: \"kubernetes.io/projected/755f3d62-7b37-470b-94ae-9e9c6a7df2d1-kube-api-access-dcczw\") pod \"cinder-b816-account-create-vwpn5\" (UID: \"755f3d62-7b37-470b-94ae-9e9c6a7df2d1\") " pod="openstack/cinder-b816-account-create-vwpn5"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.790397 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-6e46-account-create-cd8wf"]
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.791611 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6e46-account-create-cd8wf"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.794025 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.804023 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6e46-account-create-cd8wf"]
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.822596 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw48f\" (UniqueName: \"kubernetes.io/projected/0668b2a6-74b5-4ac0-83f2-de6f7858c66c-kube-api-access-nw48f\") pod \"barbican-6e46-account-create-cd8wf\" (UID: \"0668b2a6-74b5-4ac0-83f2-de6f7858c66c\") " pod="openstack/barbican-6e46-account-create-cd8wf"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.881401 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-b816-account-create-vwpn5"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.924140 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw48f\" (UniqueName: \"kubernetes.io/projected/0668b2a6-74b5-4ac0-83f2-de6f7858c66c-kube-api-access-nw48f\") pod \"barbican-6e46-account-create-cd8wf\" (UID: \"0668b2a6-74b5-4ac0-83f2-de6f7858c66c\") " pod="openstack/barbican-6e46-account-create-cd8wf"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.959634 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-66d0-account-create-nvqcp"]
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.960717 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-66d0-account-create-nvqcp"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.965211 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw48f\" (UniqueName: \"kubernetes.io/projected/0668b2a6-74b5-4ac0-83f2-de6f7858c66c-kube-api-access-nw48f\") pod \"barbican-6e46-account-create-cd8wf\" (UID: \"0668b2a6-74b5-4ac0-83f2-de6f7858c66c\") " pod="openstack/barbican-6e46-account-create-cd8wf"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.965242 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Oct 08 07:33:01 crc kubenswrapper[4693]: I1008 07:33:01.971216 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-66d0-account-create-nvqcp"]
Oct 08 07:33:02 crc kubenswrapper[4693]: I1008 07:33:02.025714 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcqnj\" (UniqueName: \"kubernetes.io/projected/a7a20db2-171f-4856-a3e0-416a6ad7c27c-kube-api-access-tcqnj\") pod \"neutron-66d0-account-create-nvqcp\" (UID: \"a7a20db2-171f-4856-a3e0-416a6ad7c27c\") " pod="openstack/neutron-66d0-account-create-nvqcp"
Oct 08 07:33:02 crc kubenswrapper[4693]: I1008 07:33:02.114459 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6e46-account-create-cd8wf"
Oct 08 07:33:02 crc kubenswrapper[4693]: I1008 07:33:02.127460 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcqnj\" (UniqueName: \"kubernetes.io/projected/a7a20db2-171f-4856-a3e0-416a6ad7c27c-kube-api-access-tcqnj\") pod \"neutron-66d0-account-create-nvqcp\" (UID: \"a7a20db2-171f-4856-a3e0-416a6ad7c27c\") " pod="openstack/neutron-66d0-account-create-nvqcp"
Oct 08 07:33:02 crc kubenswrapper[4693]: I1008 07:33:02.145196 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcqnj\" (UniqueName: \"kubernetes.io/projected/a7a20db2-171f-4856-a3e0-416a6ad7c27c-kube-api-access-tcqnj\") pod \"neutron-66d0-account-create-nvqcp\" (UID: \"a7a20db2-171f-4856-a3e0-416a6ad7c27c\") " pod="openstack/neutron-66d0-account-create-nvqcp"
Oct 08 07:33:02 crc kubenswrapper[4693]: I1008 07:33:02.314127 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-66d0-account-create-nvqcp"
Oct 08 07:33:08 crc kubenswrapper[4693]: I1008 07:33:08.562906 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-66d0-account-create-nvqcp"]
Oct 08 07:33:08 crc kubenswrapper[4693]: I1008 07:33:08.577310 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-b816-account-create-vwpn5"]
Oct 08 07:33:08 crc kubenswrapper[4693]: I1008 07:33:08.579721 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6e46-account-create-cd8wf"]
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.246324 4693 generic.go:334] "Generic (PLEG): container finished" podID="755f3d62-7b37-470b-94ae-9e9c6a7df2d1" containerID="df20617ee487cf96aef76706c8af9bf446bd542042bb9cc2f95eb8bf9b42039e" exitCode=0
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.246670 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-b816-account-create-vwpn5" event={"ID":"755f3d62-7b37-470b-94ae-9e9c6a7df2d1","Type":"ContainerDied","Data":"df20617ee487cf96aef76706c8af9bf446bd542042bb9cc2f95eb8bf9b42039e"}
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.246696 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-b816-account-create-vwpn5" event={"ID":"755f3d62-7b37-470b-94ae-9e9c6a7df2d1","Type":"ContainerStarted","Data":"49f98afd6bf9f8dcd02f736fdc993c0828a44b6887d62e1f05ecb8e946111b76"}
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.249635 4693 generic.go:334] "Generic (PLEG): container finished" podID="0668b2a6-74b5-4ac0-83f2-de6f7858c66c" containerID="a3ad3388bf012c4bf6024ea242085f3e5d118d0fab92a49bc08bacd4790fec7f" exitCode=0
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.249679 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6e46-account-create-cd8wf" event={"ID":"0668b2a6-74b5-4ac0-83f2-de6f7858c66c","Type":"ContainerDied","Data":"a3ad3388bf012c4bf6024ea242085f3e5d118d0fab92a49bc08bacd4790fec7f"}
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.249695 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6e46-account-create-cd8wf" event={"ID":"0668b2a6-74b5-4ac0-83f2-de6f7858c66c","Type":"ContainerStarted","Data":"c185399eb2b41ab8b02cfef49a10d275e1ee7b03da54ef0ccb85606c55f67f0b"}
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.252251 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"6bfb636cfdf65d08dd27baabbbfa418fd12ef5a612232049b6668f4f7b4eacd1"}
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.252278 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"c9f60dca9a7981a30ab3259ce04874b40dec96289bbd7879fad33f3215fa4f40"}
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.252301 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"1ac999c6d4fe6b84ec8b649d31a767834077821823f5d7816a8a587852d2bfad"}
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.252310 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"c23bce7f1735c679fcf8530495ad02a546162fdf2a436ea00a4ea9e6c41ec085"}
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.253828 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-k4zk2" event={"ID":"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4","Type":"ContainerStarted","Data":"4588f464f4698364fbc5d20292315c7b643f212a182dea3c6c7d4279f0c7b3b5"}
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.255524 4693 generic.go:334] "Generic (PLEG): container finished" podID="a7a20db2-171f-4856-a3e0-416a6ad7c27c" containerID="11b71e180e40027c311985fde67763af5c3c9ae59387c442c1221110f80b31dd" exitCode=0
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.255552 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66d0-account-create-nvqcp" event={"ID":"a7a20db2-171f-4856-a3e0-416a6ad7c27c","Type":"ContainerDied","Data":"11b71e180e40027c311985fde67763af5c3c9ae59387c442c1221110f80b31dd"}
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.255567 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66d0-account-create-nvqcp" event={"ID":"a7a20db2-171f-4856-a3e0-416a6ad7c27c","Type":"ContainerStarted","Data":"25fd09d12e16254617667e65af19061fdb46129474ed3bd70d7419c831dbc6a4"}
Oct 08 07:33:09 crc kubenswrapper[4693]: I1008 07:33:09.292626 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-k4zk2" podStartSLOduration=2.943956252 podStartE2EDuration="15.292608435s" podCreationTimestamp="2025-10-08 07:32:54 +0000 UTC" firstStartedPulling="2025-10-08 07:32:55.755689902 +0000 UTC m=+961.126654837" lastFinishedPulling="2025-10-08 07:33:08.104342075 +0000 UTC m=+973.475307020" observedRunningTime="2025-10-08 07:33:09.27651497 +0000 UTC m=+974.647479905" watchObservedRunningTime="2025-10-08 07:33:09.292608435 +0000 UTC m=+974.663573370"
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.081467 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6e46-account-create-cd8wf"
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.133201 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-b816-account-create-vwpn5"
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.134645 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-66d0-account-create-nvqcp"
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.195619 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nw48f\" (UniqueName: \"kubernetes.io/projected/0668b2a6-74b5-4ac0-83f2-de6f7858c66c-kube-api-access-nw48f\") pod \"0668b2a6-74b5-4ac0-83f2-de6f7858c66c\" (UID: \"0668b2a6-74b5-4ac0-83f2-de6f7858c66c\") "
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.199772 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0668b2a6-74b5-4ac0-83f2-de6f7858c66c-kube-api-access-nw48f" (OuterVolumeSpecName: "kube-api-access-nw48f") pod "0668b2a6-74b5-4ac0-83f2-de6f7858c66c" (UID: "0668b2a6-74b5-4ac0-83f2-de6f7858c66c"). InnerVolumeSpecName "kube-api-access-nw48f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.281958 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66d0-account-create-nvqcp" event={"ID":"a7a20db2-171f-4856-a3e0-416a6ad7c27c","Type":"ContainerDied","Data":"25fd09d12e16254617667e65af19061fdb46129474ed3bd70d7419c831dbc6a4"}
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.281998 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25fd09d12e16254617667e65af19061fdb46129474ed3bd70d7419c831dbc6a4"
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.282048 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-66d0-account-create-nvqcp"
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.284074 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-p8hg9" event={"ID":"15ef4c47-e897-4afe-839d-31e54512a16b","Type":"ContainerStarted","Data":"241f808f0059de9604d1f15a065ed76a29e9151236ae2833248db83af252b645"}
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.287523 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-b816-account-create-vwpn5" event={"ID":"755f3d62-7b37-470b-94ae-9e9c6a7df2d1","Type":"ContainerDied","Data":"49f98afd6bf9f8dcd02f736fdc993c0828a44b6887d62e1f05ecb8e946111b76"}
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.287561 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49f98afd6bf9f8dcd02f736fdc993c0828a44b6887d62e1f05ecb8e946111b76"
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.287590 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-b816-account-create-vwpn5"
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.289523 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6e46-account-create-cd8wf" event={"ID":"0668b2a6-74b5-4ac0-83f2-de6f7858c66c","Type":"ContainerDied","Data":"c185399eb2b41ab8b02cfef49a10d275e1ee7b03da54ef0ccb85606c55f67f0b"}
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.289550 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c185399eb2b41ab8b02cfef49a10d275e1ee7b03da54ef0ccb85606c55f67f0b"
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.289599 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6e46-account-create-cd8wf"
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.296851 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcczw\" (UniqueName: \"kubernetes.io/projected/755f3d62-7b37-470b-94ae-9e9c6a7df2d1-kube-api-access-dcczw\") pod \"755f3d62-7b37-470b-94ae-9e9c6a7df2d1\" (UID: \"755f3d62-7b37-470b-94ae-9e9c6a7df2d1\") "
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.296965 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcqnj\" (UniqueName: \"kubernetes.io/projected/a7a20db2-171f-4856-a3e0-416a6ad7c27c-kube-api-access-tcqnj\") pod \"a7a20db2-171f-4856-a3e0-416a6ad7c27c\" (UID: \"a7a20db2-171f-4856-a3e0-416a6ad7c27c\") "
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.302957 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/755f3d62-7b37-470b-94ae-9e9c6a7df2d1-kube-api-access-dcczw" (OuterVolumeSpecName: "kube-api-access-dcczw") pod "755f3d62-7b37-470b-94ae-9e9c6a7df2d1" (UID: "755f3d62-7b37-470b-94ae-9e9c6a7df2d1"). InnerVolumeSpecName "kube-api-access-dcczw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.304237 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nw48f\" (UniqueName: \"kubernetes.io/projected/0668b2a6-74b5-4ac0-83f2-de6f7858c66c-kube-api-access-nw48f\") on node \"crc\" DevicePath \"\""
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.304279 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcczw\" (UniqueName: \"kubernetes.io/projected/755f3d62-7b37-470b-94ae-9e9c6a7df2d1-kube-api-access-dcczw\") on node \"crc\" DevicePath \"\""
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.304909 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-p8hg9" podStartSLOduration=1.905913049 podStartE2EDuration="13.304892484s" podCreationTimestamp="2025-10-08 07:32:59 +0000 UTC" firstStartedPulling="2025-10-08 07:33:00.556998716 +0000 UTC m=+965.927963661" lastFinishedPulling="2025-10-08 07:33:11.955978121 +0000 UTC m=+977.326943096" observedRunningTime="2025-10-08 07:33:12.299913032 +0000 UTC m=+977.670877977" watchObservedRunningTime="2025-10-08 07:33:12.304892484 +0000 UTC m=+977.675857419"
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.305561 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7a20db2-171f-4856-a3e0-416a6ad7c27c-kube-api-access-tcqnj" (OuterVolumeSpecName: "kube-api-access-tcqnj") pod "a7a20db2-171f-4856-a3e0-416a6ad7c27c" (UID: "a7a20db2-171f-4856-a3e0-416a6ad7c27c"). InnerVolumeSpecName "kube-api-access-tcqnj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:33:12 crc kubenswrapper[4693]: I1008 07:33:12.405318 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcqnj\" (UniqueName: \"kubernetes.io/projected/a7a20db2-171f-4856-a3e0-416a6ad7c27c-kube-api-access-tcqnj\") on node \"crc\" DevicePath \"\""
Oct 08 07:33:13 crc kubenswrapper[4693]: I1008 07:33:13.302991 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"0839187765e1c048018d0a4359baf8e30e6717ac210892b3a6c1a19d0bcf78a4"}
Oct 08 07:33:13 crc kubenswrapper[4693]: I1008 07:33:13.303238 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"d601781d7ac814400064febf530ca295af62a876b1f189ddeed3eb4188ff644b"}
Oct 08 07:33:13 crc kubenswrapper[4693]: I1008 07:33:13.303249 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"369b91510ebbb58556947f1a1f94d418c9e74bd56427d784349e75a1f7d85c7e"}
Oct 08 07:33:13 crc kubenswrapper[4693]: I1008 07:33:13.303257 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"d3143767ae580ac95883b2cfcbb0c9a4d93c526f8da489eafc10ccc9c570dbdc"}
Oct 08 07:33:15 crc kubenswrapper[4693]: I1008 07:33:15.323480 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"11354495b7ad27c864bae5e3621fafd25893d1aca3c9bda60c64d724d9609687"}
Oct 08 07:33:15 crc kubenswrapper[4693]: I1008 07:33:15.323971 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"73a7f00c5ccae312e440ad8824fc10bfab5f095ca5c538010e90d4417d043582"}
Oct 08 07:33:15 crc kubenswrapper[4693]: I1008 07:33:15.323984 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"9b33483a50009aab01b4ac3d135558ed1907f5b930ade8e12f578623d96e5cec"}
Oct 08 07:33:15 crc kubenswrapper[4693]: I1008 07:33:15.323994 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"322a48d72ff51ae8a2839eb692af4370001c6e5abc4624d233a610f5e895ffd6"}
Oct 08 07:33:15 crc kubenswrapper[4693]: I1008 07:33:15.324003 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"0e3d15a702080e91e0d6f617ad38d443b1123eb4db8a25a785a97db96018eba7"}
Oct 08 07:33:15 crc kubenswrapper[4693]: I1008 07:33:15.326657 4693 generic.go:334] "Generic (PLEG): container finished" podID="c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4" containerID="4588f464f4698364fbc5d20292315c7b643f212a182dea3c6c7d4279f0c7b3b5" exitCode=0
Oct 08 07:33:15 crc kubenswrapper[4693]: I1008 07:33:15.326714 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-k4zk2"
event={"ID":"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4","Type":"ContainerDied","Data":"4588f464f4698364fbc5d20292315c7b643f212a182dea3c6c7d4279f0c7b3b5"} Oct 08 07:33:15 crc kubenswrapper[4693]: I1008 07:33:15.328386 4693 generic.go:334] "Generic (PLEG): container finished" podID="15ef4c47-e897-4afe-839d-31e54512a16b" containerID="241f808f0059de9604d1f15a065ed76a29e9151236ae2833248db83af252b645" exitCode=0 Oct 08 07:33:15 crc kubenswrapper[4693]: I1008 07:33:15.328425 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-p8hg9" event={"ID":"15ef4c47-e897-4afe-839d-31e54512a16b","Type":"ContainerDied","Data":"241f808f0059de9604d1f15a065ed76a29e9151236ae2833248db83af252b645"} Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.351836 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"73d2391697d705b7509c64d2980ae41704e7177301bd9b4c9fbfd2dbd26e1992"} Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.352285 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"298a15e0-992f-4f83-8067-7e8e6aa47b89","Type":"ContainerStarted","Data":"69d4abacbbacf9b26adffc7994d7dd9f5a2746b304abd3ecfa125db0d397a158"} Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.442492 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.188387622 podStartE2EDuration="50.44246827s" podCreationTimestamp="2025-10-08 07:32:26 +0000 UTC" firstStartedPulling="2025-10-08 07:33:00.098371929 +0000 UTC m=+965.469336864" lastFinishedPulling="2025-10-08 07:33:14.352452577 +0000 UTC m=+979.723417512" observedRunningTime="2025-10-08 07:33:16.412654964 +0000 UTC m=+981.783619959" watchObservedRunningTime="2025-10-08 07:33:16.44246827 +0000 UTC m=+981.813433215" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.720975 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-jwdtv"] Oct 08 07:33:16 crc kubenswrapper[4693]: E1008 07:33:16.721633 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7a20db2-171f-4856-a3e0-416a6ad7c27c" containerName="mariadb-account-create" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.721650 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7a20db2-171f-4856-a3e0-416a6ad7c27c" containerName="mariadb-account-create" Oct 08 07:33:16 crc kubenswrapper[4693]: E1008 07:33:16.721672 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0668b2a6-74b5-4ac0-83f2-de6f7858c66c" containerName="mariadb-account-create" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.721680 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="0668b2a6-74b5-4ac0-83f2-de6f7858c66c" containerName="mariadb-account-create" Oct 08 07:33:16 crc kubenswrapper[4693]: E1008 07:33:16.721691 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="755f3d62-7b37-470b-94ae-9e9c6a7df2d1" containerName="mariadb-account-create" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.721697 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="755f3d62-7b37-470b-94ae-9e9c6a7df2d1" containerName="mariadb-account-create" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.721879 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7a20db2-171f-4856-a3e0-416a6ad7c27c" containerName="mariadb-account-create" Oct 08 07:33:16 crc 
kubenswrapper[4693]: I1008 07:33:16.721903 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="0668b2a6-74b5-4ac0-83f2-de6f7858c66c" containerName="mariadb-account-create" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.721916 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="755f3d62-7b37-470b-94ae-9e9c6a7df2d1" containerName="mariadb-account-create" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.730439 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.733417 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.781615 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-jwdtv"] Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.877455 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-p8hg9" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.882628 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.882717 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-config\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.882969 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttgzw\" (UniqueName: \"kubernetes.io/projected/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-kube-api-access-ttgzw\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.883253 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.883287 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-dns-svc\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.883373 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 
07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.884016 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-k4zk2" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.984443 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ef4c47-e897-4afe-839d-31e54512a16b-combined-ca-bundle\") pod \"15ef4c47-e897-4afe-839d-31e54512a16b\" (UID: \"15ef4c47-e897-4afe-839d-31e54512a16b\") " Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.984565 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-config-data\") pod \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.984592 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15ef4c47-e897-4afe-839d-31e54512a16b-config-data\") pod \"15ef4c47-e897-4afe-839d-31e54512a16b\" (UID: \"15ef4c47-e897-4afe-839d-31e54512a16b\") " Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.984636 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-combined-ca-bundle\") pod \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.984712 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwqmc\" (UniqueName: \"kubernetes.io/projected/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-kube-api-access-lwqmc\") pod \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.984736 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-db-sync-config-data\") pod \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\" (UID: \"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4\") " Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.984832 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkjtc\" (UniqueName: \"kubernetes.io/projected/15ef4c47-e897-4afe-839d-31e54512a16b-kube-api-access-nkjtc\") pod \"15ef4c47-e897-4afe-839d-31e54512a16b\" (UID: \"15ef4c47-e897-4afe-839d-31e54512a16b\") " Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.985043 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-config\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.985072 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttgzw\" (UniqueName: \"kubernetes.io/projected/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-kube-api-access-ttgzw\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.985125 4693 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.985142 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-dns-svc\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.985155 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.985193 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.986047 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.990837 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4" (UID: "c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.991105 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-config\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.991322 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-dns-svc\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.991993 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-kube-api-access-lwqmc" (OuterVolumeSpecName: "kube-api-access-lwqmc") pod "c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4" (UID: "c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4"). InnerVolumeSpecName "kube-api-access-lwqmc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.993395 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:16 crc kubenswrapper[4693]: I1008 07:33:16.996114 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.007153 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttgzw\" (UniqueName: \"kubernetes.io/projected/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-kube-api-access-ttgzw\") pod \"dnsmasq-dns-764c5664d7-jwdtv\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.008420 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15ef4c47-e897-4afe-839d-31e54512a16b-kube-api-access-nkjtc" (OuterVolumeSpecName: "kube-api-access-nkjtc") pod "15ef4c47-e897-4afe-839d-31e54512a16b" (UID: "15ef4c47-e897-4afe-839d-31e54512a16b"). InnerVolumeSpecName "kube-api-access-nkjtc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.017031 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4" (UID: "c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.024640 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15ef4c47-e897-4afe-839d-31e54512a16b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15ef4c47-e897-4afe-839d-31e54512a16b" (UID: "15ef4c47-e897-4afe-839d-31e54512a16b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.037146 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-config-data" (OuterVolumeSpecName: "config-data") pod "c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4" (UID: "c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.044135 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15ef4c47-e897-4afe-839d-31e54512a16b-config-data" (OuterVolumeSpecName: "config-data") pod "15ef4c47-e897-4afe-839d-31e54512a16b" (UID: "15ef4c47-e897-4afe-839d-31e54512a16b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.086762 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwqmc\" (UniqueName: \"kubernetes.io/projected/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-kube-api-access-lwqmc\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.086802 4693 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.086845 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkjtc\" (UniqueName: \"kubernetes.io/projected/15ef4c47-e897-4afe-839d-31e54512a16b-kube-api-access-nkjtc\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.086855 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ef4c47-e897-4afe-839d-31e54512a16b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.086864 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.086872 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15ef4c47-e897-4afe-839d-31e54512a16b-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.086880 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.174435 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.360548 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-k4zk2" event={"ID":"c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4","Type":"ContainerDied","Data":"caa9de3f79a7d4e7bce53affaa7c1d7f0d87a0b5115e6e0c3a13db523b313be7"} Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.360936 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="caa9de3f79a7d4e7bce53affaa7c1d7f0d87a0b5115e6e0c3a13db523b313be7" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.360602 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-k4zk2" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.371272 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-p8hg9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.395946 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-p8hg9" event={"ID":"15ef4c47-e897-4afe-839d-31e54512a16b","Type":"ContainerDied","Data":"fcb3211833a0f8965834bd1da6b97bd01da12010171eb4fbb0a128f207dfb9b1"} Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.395981 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcb3211833a0f8965834bd1da6b97bd01da12010171eb4fbb0a128f207dfb9b1" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.505393 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-jwdtv"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.538057 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-vgrz9"] Oct 08 07:33:17 crc kubenswrapper[4693]: E1008 07:33:17.540968 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15ef4c47-e897-4afe-839d-31e54512a16b" containerName="keystone-db-sync" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.541082 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="15ef4c47-e897-4afe-839d-31e54512a16b" containerName="keystone-db-sync" Oct 08 07:33:17 crc kubenswrapper[4693]: E1008 07:33:17.541405 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4" containerName="glance-db-sync" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.541490 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4" containerName="glance-db-sync" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.541764 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="15ef4c47-e897-4afe-839d-31e54512a16b" containerName="keystone-db-sync" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.542001 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4" containerName="glance-db-sync" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.542860 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.549343 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.549520 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.549644 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-nd6nt" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.549757 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.550695 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vgrz9"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.568452 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-jwdtv"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.624851 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-mn4l6"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.631293 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.688887 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-mn4l6"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.704323 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.704605 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.704750 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-config\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.704881 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-combined-ca-bundle\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.704983 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skfzw\" (UniqueName: \"kubernetes.io/projected/ea9cbb40-f943-4242-96b5-de1c252ec7c1-kube-api-access-skfzw\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.705123 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-config-data\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.705210 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-scripts\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.705322 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kns79\" (UniqueName: \"kubernetes.io/projected/bac28fa6-fc14-4384-a5c2-969c7f422bbf-kube-api-access-kns79\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.705420 4693 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-dns-svc\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.705528 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-fernet-keys\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.705757 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-credential-keys\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.705891 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.735143 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5c7456bbdf-llw58"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.736737 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.741108 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.741293 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.750302 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5c7456bbdf-llw58"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.757358 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-r7p2j"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.758315 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.762085 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-2k87v" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.762271 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.769115 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.769248 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-k5q4c" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.769626 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.791573 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-r7p2j"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807343 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8c65eaa0-7e61-4044-99d2-c61192d02cae-horizon-secret-key\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807405 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8c65eaa0-7e61-4044-99d2-c61192d02cae-config-data\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807532 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmk79\" (UniqueName: \"kubernetes.io/projected/8c65eaa0-7e61-4044-99d2-c61192d02cae-kube-api-access-zmk79\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807571 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807593 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c65eaa0-7e61-4044-99d2-c61192d02cae-logs\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807624 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c65eaa0-7e61-4044-99d2-c61192d02cae-scripts\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807683 4693 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807707 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-config\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807724 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-combined-ca-bundle\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807751 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skfzw\" (UniqueName: \"kubernetes.io/projected/ea9cbb40-f943-4242-96b5-de1c252ec7c1-kube-api-access-skfzw\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807802 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-config-data\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807844 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-scripts\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807866 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kns79\" (UniqueName: \"kubernetes.io/projected/bac28fa6-fc14-4384-a5c2-969c7f422bbf-kube-api-access-kns79\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807890 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-dns-svc\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807938 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-fernet-keys\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807958 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-credential-keys\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.807977 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.808715 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.809232 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.809730 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.810259 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-config\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.811045 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-dns-svc\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.819712 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-scripts\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.828651 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-fernet-keys\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.830098 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-credential-keys\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " 
pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.833082 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skfzw\" (UniqueName: \"kubernetes.io/projected/ea9cbb40-f943-4242-96b5-de1c252ec7c1-kube-api-access-skfzw\") pod \"dnsmasq-dns-5959f8865f-mn4l6\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.836722 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-config-data\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.846579 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-combined-ca-bundle\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.854668 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kns79\" (UniqueName: \"kubernetes.io/projected/bac28fa6-fc14-4384-a5c2-969c7f422bbf-kube-api-access-kns79\") pod \"keystone-bootstrap-vgrz9\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.863644 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-m4xlg"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.864746 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.867375 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.869991 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-s2pl4" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.870340 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.873251 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.890690 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-m4xlg"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.902884 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-rxtmf"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.903995 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-rxtmf" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.912592 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.913266 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-bq7dn" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.913833 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8c65eaa0-7e61-4044-99d2-c61192d02cae-config-data\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.913877 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-scripts\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.913900 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmk79\" (UniqueName: \"kubernetes.io/projected/8c65eaa0-7e61-4044-99d2-c61192d02cae-kube-api-access-zmk79\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.913929 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c65eaa0-7e61-4044-99d2-c61192d02cae-logs\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.913951 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c65eaa0-7e61-4044-99d2-c61192d02cae-scripts\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.913978 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-config-data\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.914038 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdmgt\" (UniqueName: \"kubernetes.io/projected/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-kube-api-access-gdmgt\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.914062 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-combined-ca-bundle\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.914080 4693 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-etc-machine-id\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.914093 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-db-sync-config-data\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.914116 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8c65eaa0-7e61-4044-99d2-c61192d02cae-horizon-secret-key\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.924328 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c65eaa0-7e61-4044-99d2-c61192d02cae-scripts\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.925522 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8c65eaa0-7e61-4044-99d2-c61192d02cae-config-data\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.926001 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c65eaa0-7e61-4044-99d2-c61192d02cae-logs\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.925999 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-mn4l6"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.926593 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.930052 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8c65eaa0-7e61-4044-99d2-c61192d02cae-horizon-secret-key\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.943326 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-rxtmf"] Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.960104 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmk79\" (UniqueName: \"kubernetes.io/projected/8c65eaa0-7e61-4044-99d2-c61192d02cae-kube-api-access-zmk79\") pod \"horizon-5c7456bbdf-llw58\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:17 crc kubenswrapper[4693]: I1008 07:33:17.997392 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-trdh5"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.000315 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.028598 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-trdh5"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.044477 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.044714 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-g7qwr" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.044855 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.047093 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-config-data\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.047193 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-scripts\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.047282 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-config-data\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.047314 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgg9b\" (UniqueName: \"kubernetes.io/projected/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-kube-api-access-vgg9b\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc 
kubenswrapper[4693]: I1008 07:33:18.047356 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdmgt\" (UniqueName: \"kubernetes.io/projected/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-kube-api-access-gdmgt\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.047379 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79884c8c-f689-46b7-9223-66bd0b7bff8e-combined-ca-bundle\") pod \"barbican-db-sync-rxtmf\" (UID: \"79884c8c-f689-46b7-9223-66bd0b7bff8e\") " pod="openstack/barbican-db-sync-rxtmf" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.047412 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-logs\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.047440 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-combined-ca-bundle\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.047464 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-etc-machine-id\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.047486 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-db-sync-config-data\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.047523 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcpj6\" (UniqueName: \"kubernetes.io/projected/79884c8c-f689-46b7-9223-66bd0b7bff8e-kube-api-access-xcpj6\") pod \"barbican-db-sync-rxtmf\" (UID: \"79884c8c-f689-46b7-9223-66bd0b7bff8e\") " pod="openstack/barbican-db-sync-rxtmf" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.047593 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-combined-ca-bundle\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.047658 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/79884c8c-f689-46b7-9223-66bd0b7bff8e-db-sync-config-data\") pod \"barbican-db-sync-rxtmf\" (UID: \"79884c8c-f689-46b7-9223-66bd0b7bff8e\") " pod="openstack/barbican-db-sync-rxtmf" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 
07:33:18.047682 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-scripts\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.050915 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-etc-machine-id\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.055446 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-combined-ca-bundle\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.061897 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-db-sync-config-data\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.082083 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-scripts\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.088726 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-config-data\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.095985 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-bnv6m"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.100180 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.146179 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdmgt\" (UniqueName: \"kubernetes.io/projected/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-kube-api-access-gdmgt\") pod \"cinder-db-sync-r7p2j\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.146659 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.181622 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-bnv6m"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.206291 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-bnv6m"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.227556 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-597998b4bf-mrxcd"] Oct 08 07:33:18 crc kubenswrapper[4693]: E1008 07:33:18.228008 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc dns-swift-storage-0 kube-api-access-5kzx5 ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[config dns-svc dns-swift-storage-0 kube-api-access-5kzx5 ovsdbserver-nb ovsdbserver-sb]: context canceled" pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" podUID="82bc033a-ffa3-4fd3-8134-2c438391384b" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.228064 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/79884c8c-f689-46b7-9223-66bd0b7bff8e-db-sync-config-data\") pod \"barbican-db-sync-rxtmf\" (UID: \"79884c8c-f689-46b7-9223-66bd0b7bff8e\") " pod="openstack/barbican-db-sync-rxtmf" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.228155 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-scripts\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.228201 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-config-data\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.228243 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgg9b\" (UniqueName: \"kubernetes.io/projected/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-kube-api-access-vgg9b\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.228267 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79884c8c-f689-46b7-9223-66bd0b7bff8e-combined-ca-bundle\") pod \"barbican-db-sync-rxtmf\" (UID: \"79884c8c-f689-46b7-9223-66bd0b7bff8e\") " pod="openstack/barbican-db-sync-rxtmf" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.228518 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-logs\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.228577 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/21be0841-c23b-4e2a-96dd-eebb788a1104-config\") pod 
\"neutron-db-sync-trdh5\" (UID: \"21be0841-c23b-4e2a-96dd-eebb788a1104\") " pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.228597 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcpj6\" (UniqueName: \"kubernetes.io/projected/79884c8c-f689-46b7-9223-66bd0b7bff8e-kube-api-access-xcpj6\") pod \"barbican-db-sync-rxtmf\" (UID: \"79884c8c-f689-46b7-9223-66bd0b7bff8e\") " pod="openstack/barbican-db-sync-rxtmf" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.228621 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4zcz\" (UniqueName: \"kubernetes.io/projected/21be0841-c23b-4e2a-96dd-eebb788a1104-kube-api-access-l4zcz\") pod \"neutron-db-sync-trdh5\" (UID: \"21be0841-c23b-4e2a-96dd-eebb788a1104\") " pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.228637 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21be0841-c23b-4e2a-96dd-eebb788a1104-combined-ca-bundle\") pod \"neutron-db-sync-trdh5\" (UID: \"21be0841-c23b-4e2a-96dd-eebb788a1104\") " pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.228675 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-combined-ca-bundle\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.229001 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.230277 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-logs\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.230618 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.230668 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.235318 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.235572 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.237371 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-qkfgb"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.238712 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.259428 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/79884c8c-f689-46b7-9223-66bd0b7bff8e-db-sync-config-data\") pod \"barbican-db-sync-rxtmf\" (UID: \"79884c8c-f689-46b7-9223-66bd0b7bff8e\") " pod="openstack/barbican-db-sync-rxtmf" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.271644 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgg9b\" (UniqueName: \"kubernetes.io/projected/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-kube-api-access-vgg9b\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.275772 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79884c8c-f689-46b7-9223-66bd0b7bff8e-combined-ca-bundle\") pod \"barbican-db-sync-rxtmf\" (UID: \"79884c8c-f689-46b7-9223-66bd0b7bff8e\") " pod="openstack/barbican-db-sync-rxtmf" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.281920 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-597998b4bf-mrxcd"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.289763 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-scripts\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.293534 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-combined-ca-bundle\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.294154 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-config-data\") pod \"placement-db-sync-m4xlg\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") " pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.297890 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcpj6\" (UniqueName: \"kubernetes.io/projected/79884c8c-f689-46b7-9223-66bd0b7bff8e-kube-api-access-xcpj6\") pod \"barbican-db-sync-rxtmf\" (UID: \"79884c8c-f689-46b7-9223-66bd0b7bff8e\") " pod="openstack/barbican-db-sync-rxtmf" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.333115 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.335792 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.335917 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.335941 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-scripts\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.335962 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.335979 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336000 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336016 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336041 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/019964d2-ae2a-4501-ad53-c6c0b2260a32-log-httpd\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336064 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-scripts\") pod \"ceilometer-0\" (UID: 
\"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336082 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336099 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7s6p\" (UniqueName: \"kubernetes.io/projected/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-kube-api-access-h7s6p\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336128 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-logs\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336152 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-horizon-secret-key\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336176 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336198 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/019964d2-ae2a-4501-ad53-c6c0b2260a32-run-httpd\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336213 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-config\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336235 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kzx5\" (UniqueName: \"kubernetes.io/projected/82bc033a-ffa3-4fd3-8134-2c438391384b-kube-api-access-5kzx5\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336249 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-config-data\") pod \"horizon-597998b4bf-mrxcd\" (UID: 
\"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336280 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/21be0841-c23b-4e2a-96dd-eebb788a1104-config\") pod \"neutron-db-sync-trdh5\" (UID: \"21be0841-c23b-4e2a-96dd-eebb788a1104\") " pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336297 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-config\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336312 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-config-data\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336325 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmkrc\" (UniqueName: \"kubernetes.io/projected/c34ee0aa-60c1-4dce-8a22-5415eb6da004-kube-api-access-dmkrc\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336342 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4zcz\" (UniqueName: \"kubernetes.io/projected/21be0841-c23b-4e2a-96dd-eebb788a1104-kube-api-access-l4zcz\") pod \"neutron-db-sync-trdh5\" (UID: \"21be0841-c23b-4e2a-96dd-eebb788a1104\") " pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336357 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21be0841-c23b-4e2a-96dd-eebb788a1104-combined-ca-bundle\") pod \"neutron-db-sync-trdh5\" (UID: \"21be0841-c23b-4e2a-96dd-eebb788a1104\") " pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336379 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6ff9\" (UniqueName: \"kubernetes.io/projected/019964d2-ae2a-4501-ad53-c6c0b2260a32-kube-api-access-b6ff9\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336398 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.336414 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: 
\"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.347605 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.355062 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/21be0841-c23b-4e2a-96dd-eebb788a1104-config\") pod \"neutron-db-sync-trdh5\" (UID: \"21be0841-c23b-4e2a-96dd-eebb788a1104\") " pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.357678 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21be0841-c23b-4e2a-96dd-eebb788a1104-combined-ca-bundle\") pod \"neutron-db-sync-trdh5\" (UID: \"21be0841-c23b-4e2a-96dd-eebb788a1104\") " pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.362414 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-m4xlg" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.372480 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-qkfgb"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.371984 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4zcz\" (UniqueName: \"kubernetes.io/projected/21be0841-c23b-4e2a-96dd-eebb788a1104-kube-api-access-l4zcz\") pod \"neutron-db-sync-trdh5\" (UID: \"21be0841-c23b-4e2a-96dd-eebb788a1104\") " pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.403945 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.404802 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" event={"ID":"b09eb69e-639c-43c9-a70b-0f6089bdfa6c","Type":"ContainerStarted","Data":"d9d6bc73b984c95dd55df0afc2fa18742730fc1e5a369d0dd24d4301d3dcc025"} Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.432383 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.434266 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.439194 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.439441 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440190 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6ff9\" (UniqueName: \"kubernetes.io/projected/019964d2-ae2a-4501-ad53-c6c0b2260a32-kube-api-access-b6ff9\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440219 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440238 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440255 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440276 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440301 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-scripts\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440315 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440330 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440350 
4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440366 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440390 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/019964d2-ae2a-4501-ad53-c6c0b2260a32-log-httpd\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440410 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-scripts\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440425 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440443 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7s6p\" (UniqueName: \"kubernetes.io/projected/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-kube-api-access-h7s6p\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440463 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-logs\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440485 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-horizon-secret-key\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440510 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440532 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/019964d2-ae2a-4501-ad53-c6c0b2260a32-run-httpd\") pod \"ceilometer-0\" (UID: 
\"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440549 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-config\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440567 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kzx5\" (UniqueName: \"kubernetes.io/projected/82bc033a-ffa3-4fd3-8134-2c438391384b-kube-api-access-5kzx5\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440580 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-config-data\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440611 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-config-data\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440624 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-config\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440640 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmkrc\" (UniqueName: \"kubernetes.io/projected/c34ee0aa-60c1-4dce-8a22-5415eb6da004-kube-api-access-dmkrc\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.440666 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-z69ht" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.441117 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/019964d2-ae2a-4501-ad53-c6c0b2260a32-log-httpd\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.441316 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.442109 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/019964d2-ae2a-4501-ad53-c6c0b2260a32-run-httpd\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.442487 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.443012 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.443558 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-config\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.443997 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.444468 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-config\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.444746 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.445062 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.445494 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-scripts\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.445609 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.446029 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.446166 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-config-data\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.448781 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-logs\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.449375 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.451512 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-horizon-secret-key\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.454601 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-config-data\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.462208 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.464892 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-scripts\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.465373 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.469007 4693 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kzx5\" (UniqueName: \"kubernetes.io/projected/82bc033a-ffa3-4fd3-8134-2c438391384b-kube-api-access-5kzx5\") pod \"dnsmasq-dns-58dd9ff6bc-bnv6m\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.469862 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmkrc\" (UniqueName: \"kubernetes.io/projected/c34ee0aa-60c1-4dce-8a22-5415eb6da004-kube-api-access-dmkrc\") pod \"dnsmasq-dns-785d8bcb8c-qkfgb\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") " pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.469936 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.470412 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.471618 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6ff9\" (UniqueName: \"kubernetes.io/projected/019964d2-ae2a-4501-ad53-c6c0b2260a32-kube-api-access-b6ff9\") pod \"ceilometer-0\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.472551 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7s6p\" (UniqueName: \"kubernetes.io/projected/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-kube-api-access-h7s6p\") pod \"horizon-597998b4bf-mrxcd\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.472979 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-rxtmf" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.548661 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-dns-swift-storage-0\") pod \"82bc033a-ffa3-4fd3-8134-2c438391384b\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.548723 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-ovsdbserver-sb\") pod \"82bc033a-ffa3-4fd3-8134-2c438391384b\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.548767 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-config\") pod \"82bc033a-ffa3-4fd3-8134-2c438391384b\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.548806 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-dns-svc\") pod \"82bc033a-ffa3-4fd3-8134-2c438391384b\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.548836 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-ovsdbserver-nb\") pod \"82bc033a-ffa3-4fd3-8134-2c438391384b\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.549070 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f85cc949-c985-4eaf-ad5a-be8936e8b4be-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.549125 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9469c\" (UniqueName: \"kubernetes.io/projected/f85cc949-c985-4eaf-ad5a-be8936e8b4be-kube-api-access-9469c\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.549166 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.549187 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.549215 4693 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-config-data\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.549222 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-config" (OuterVolumeSpecName: "config") pod "82bc033a-ffa3-4fd3-8134-2c438391384b" (UID: "82bc033a-ffa3-4fd3-8134-2c438391384b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.549243 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-scripts\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.549310 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f85cc949-c985-4eaf-ad5a-be8936e8b4be-logs\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.549570 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.549604 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "82bc033a-ffa3-4fd3-8134-2c438391384b" (UID: "82bc033a-ffa3-4fd3-8134-2c438391384b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.550010 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "82bc033a-ffa3-4fd3-8134-2c438391384b" (UID: "82bc033a-ffa3-4fd3-8134-2c438391384b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.550125 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "82bc033a-ffa3-4fd3-8134-2c438391384b" (UID: "82bc033a-ffa3-4fd3-8134-2c438391384b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.550782 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "82bc033a-ffa3-4fd3-8134-2c438391384b" (UID: "82bc033a-ffa3-4fd3-8134-2c438391384b"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.605537 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.623288 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vgrz9"] Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.650771 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kzx5\" (UniqueName: \"kubernetes.io/projected/82bc033a-ffa3-4fd3-8134-2c438391384b-kube-api-access-5kzx5\") pod \"82bc033a-ffa3-4fd3-8134-2c438391384b\" (UID: \"82bc033a-ffa3-4fd3-8134-2c438391384b\") " Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651217 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f85cc949-c985-4eaf-ad5a-be8936e8b4be-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651345 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9469c\" (UniqueName: \"kubernetes.io/projected/f85cc949-c985-4eaf-ad5a-be8936e8b4be-kube-api-access-9469c\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651395 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651416 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651439 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-config-data\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651469 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-scripts\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651483 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f85cc949-c985-4eaf-ad5a-be8936e8b4be-logs\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651535 4693 reconciler_common.go:293] "Volume detached for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651545 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651555 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651563 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82bc033a-ffa3-4fd3-8134-2c438391384b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651793 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.651913 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f85cc949-c985-4eaf-ad5a-be8936e8b4be-logs\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.653456 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f85cc949-c985-4eaf-ad5a-be8936e8b4be-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.661835 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.662745 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-config-data\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.664763 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-scripts\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.666744 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.670995 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82bc033a-ffa3-4fd3-8134-2c438391384b-kube-api-access-5kzx5" (OuterVolumeSpecName: "kube-api-access-5kzx5") pod "82bc033a-ffa3-4fd3-8134-2c438391384b" (UID: "82bc033a-ffa3-4fd3-8134-2c438391384b"). InnerVolumeSpecName "kube-api-access-5kzx5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.672409 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9469c\" (UniqueName: \"kubernetes.io/projected/f85cc949-c985-4eaf-ad5a-be8936e8b4be-kube-api-access-9469c\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.680254 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.689360 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.757574 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kzx5\" (UniqueName: \"kubernetes.io/projected/82bc033a-ffa3-4fd3-8134-2c438391384b-kube-api-access-5kzx5\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.759967 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 08 07:33:18 crc kubenswrapper[4693]: I1008 07:33:18.802731 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-mn4l6"] Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.000850 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5c7456bbdf-llw58"] Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.114211 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-r7p2j"] Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.130508 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-rxtmf"] Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.314015 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-qkfgb"] Oct 08 07:33:19 crc kubenswrapper[4693]: W1008 07:33:19.318133 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21be0841_c23b_4e2a_96dd_eebb788a1104.slice/crio-0cb5334842add8d97255687a0e6acde820058235b5ba8c80853a737971839a14 WatchSource:0}: Error finding container 0cb5334842add8d97255687a0e6acde820058235b5ba8c80853a737971839a14: Status 404 returned error can't find the container with id 0cb5334842add8d97255687a0e6acde820058235b5ba8c80853a737971839a14 Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.323689 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-trdh5"] Oct 08 07:33:19 crc kubenswrapper[4693]: W1008 07:33:19.327507 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4d301e9_d078_4876_a6a2_52a7c3b4dcbe.slice/crio-a26f89784069255b23e6d92b51296afb3716749d97d4e8e04150d1d52ec765cd WatchSource:0}: Error finding container a26f89784069255b23e6d92b51296afb3716749d97d4e8e04150d1d52ec765cd: Status 404 returned error can't find the container with id a26f89784069255b23e6d92b51296afb3716749d97d4e8e04150d1d52ec765cd Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.330077 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-m4xlg"] Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.387159 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.416632 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-r7p2j" event={"ID":"e1bdcd99-a53d-45ee-b439-57c0e0025fb9","Type":"ContainerStarted","Data":"ee360127a2ec152e3e98351c393aa470cbbebe6c719cffc6b7a7e0e6cbb5ca3b"} Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.418863 4693 generic.go:334] "Generic (PLEG): container finished" podID="ea9cbb40-f943-4242-96b5-de1c252ec7c1" containerID="73776fd2f458b852bd75ba21af119795dcc7661b70d1188a2fbb1477220dcd64" exitCode=0 Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.418952 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" event={"ID":"ea9cbb40-f943-4242-96b5-de1c252ec7c1","Type":"ContainerDied","Data":"73776fd2f458b852bd75ba21af119795dcc7661b70d1188a2fbb1477220dcd64"} Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.418977 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" 
event={"ID":"ea9cbb40-f943-4242-96b5-de1c252ec7c1","Type":"ContainerStarted","Data":"844200c0889160410464b05bc9e702bd7b6cc4f1ae1639dde0563f422f5aa8bf"} Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.422854 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-trdh5" event={"ID":"21be0841-c23b-4e2a-96dd-eebb788a1104","Type":"ContainerStarted","Data":"0cb5334842add8d97255687a0e6acde820058235b5ba8c80853a737971839a14"} Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.434494 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c7456bbdf-llw58" event={"ID":"8c65eaa0-7e61-4044-99d2-c61192d02cae","Type":"ContainerStarted","Data":"30f3beeca361f2f330453bf566191104dd7cd5d4efa2406d18591e388be289f5"} Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.442101 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.447465 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.450186 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.450426 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-rxtmf" event={"ID":"79884c8c-f689-46b7-9223-66bd0b7bff8e","Type":"ContainerStarted","Data":"a41bd54365902d5084b4ec6202695bd061f8a1b9a88cea50c0b45030a550c94a"} Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.461869 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" event={"ID":"c34ee0aa-60c1-4dce-8a22-5415eb6da004","Type":"ContainerStarted","Data":"238f0b10b09dab492e6ab6447f224a163cf6b07624a01603561338b5c9b0cd03"} Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.474422 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-m4xlg" event={"ID":"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe","Type":"ContainerStarted","Data":"a26f89784069255b23e6d92b51296afb3716749d97d4e8e04150d1d52ec765cd"} Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.483338 4693 generic.go:334] "Generic (PLEG): container finished" podID="b09eb69e-639c-43c9-a70b-0f6089bdfa6c" containerID="dd85988013b7954ec944b827e97ac5d2142ad6defc20cfd811883e86241987e4" exitCode=0 Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.483417 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" event={"ID":"b09eb69e-639c-43c9-a70b-0f6089bdfa6c","Type":"ContainerDied","Data":"dd85988013b7954ec944b827e97ac5d2142ad6defc20cfd811883e86241987e4"} Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.487366 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.493054 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vgrz9" event={"ID":"bac28fa6-fc14-4384-a5c2-969c7f422bbf","Type":"ContainerStarted","Data":"4b9738f5d8570ba3579097538ce1438277cc9dd0a3519d9dc7f2eb72c010df5c"} Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.493105 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vgrz9" 
event={"ID":"bac28fa6-fc14-4384-a5c2-969c7f422bbf","Type":"ContainerStarted","Data":"5e9607d681e43d76015f727280b662bacaa3ea0cacf9d4347b4d15be595b93d2"} Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.494702 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-bnv6m" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.495320 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"019964d2-ae2a-4501-ad53-c6c0b2260a32","Type":"ContainerStarted","Data":"e2985ddb30e6886812d13f7075db5e5b21e23c8425eaeb3f0fd98c4ebe00a316"} Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.529182 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-597998b4bf-mrxcd"] Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.560928 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-vgrz9" podStartSLOduration=2.560912419 podStartE2EDuration="2.560912419s" podCreationTimestamp="2025-10-08 07:33:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:33:19.534114602 +0000 UTC m=+984.905079547" watchObservedRunningTime="2025-10-08 07:33:19.560912419 +0000 UTC m=+984.931877344" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.578909 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.578981 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f5d2\" (UniqueName: \"kubernetes.io/projected/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-kube-api-access-8f5d2\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.579036 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.579056 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.579147 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.579176 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-logs\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.579208 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.579445 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-bnv6m"] Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.587274 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-bnv6m"] Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.610719 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:33:19 crc kubenswrapper[4693]: W1008 07:33:19.641245 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf85cc949_c985_4eaf_ad5a_be8936e8b4be.slice/crio-5e22efa897dac524a6684f63c0570eadf6c2bb3a5ceb742981133b7ecb17fc03 WatchSource:0}: Error finding container 5e22efa897dac524a6684f63c0570eadf6c2bb3a5ceb742981133b7ecb17fc03: Status 404 returned error can't find the container with id 5e22efa897dac524a6684f63c0570eadf6c2bb3a5ceb742981133b7ecb17fc03 Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.690161 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.690456 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-logs\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.690509 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.690560 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.690606 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f5d2\" (UniqueName: \"kubernetes.io/projected/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-kube-api-access-8f5d2\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " 
pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.690649 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.690672 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.693435 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.695027 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.695282 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-logs\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.697995 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.716672 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f5d2\" (UniqueName: \"kubernetes.io/projected/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-kube-api-access-8f5d2\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.717065 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.717848 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:19 crc kubenswrapper[4693]: I1008 07:33:19.799986 4693 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.001339 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.003732 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.085039 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.099259 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-ovsdbserver-nb\") pod \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.099381 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-dns-swift-storage-0\") pod \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.099462 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-ovsdbserver-sb\") pod \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.099619 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-dns-swift-storage-0\") pod \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.099644 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-dns-svc\") pod \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.099683 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skfzw\" (UniqueName: \"kubernetes.io/projected/ea9cbb40-f943-4242-96b5-de1c252ec7c1-kube-api-access-skfzw\") pod \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.099703 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-ovsdbserver-nb\") pod \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.100096 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-dns-svc\") pod \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.100142 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-ovsdbserver-sb\") pod \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.100204 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-config\") pod \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\" (UID: \"ea9cbb40-f943-4242-96b5-de1c252ec7c1\") " Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.100221 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttgzw\" (UniqueName: \"kubernetes.io/projected/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-kube-api-access-ttgzw\") pod \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.100259 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-config\") pod \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\" (UID: \"b09eb69e-639c-43c9-a70b-0f6089bdfa6c\") " Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.125657 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-kube-api-access-ttgzw" (OuterVolumeSpecName: "kube-api-access-ttgzw") pod "b09eb69e-639c-43c9-a70b-0f6089bdfa6c" (UID: "b09eb69e-639c-43c9-a70b-0f6089bdfa6c"). InnerVolumeSpecName "kube-api-access-ttgzw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.129012 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea9cbb40-f943-4242-96b5-de1c252ec7c1-kube-api-access-skfzw" (OuterVolumeSpecName: "kube-api-access-skfzw") pod "ea9cbb40-f943-4242-96b5-de1c252ec7c1" (UID: "ea9cbb40-f943-4242-96b5-de1c252ec7c1"). InnerVolumeSpecName "kube-api-access-skfzw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.152884 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ea9cbb40-f943-4242-96b5-de1c252ec7c1" (UID: "ea9cbb40-f943-4242-96b5-de1c252ec7c1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.172400 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b09eb69e-639c-43c9-a70b-0f6089bdfa6c" (UID: "b09eb69e-639c-43c9-a70b-0f6089bdfa6c"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.176000 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b09eb69e-639c-43c9-a70b-0f6089bdfa6c" (UID: "b09eb69e-639c-43c9-a70b-0f6089bdfa6c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.196601 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-config" (OuterVolumeSpecName: "config") pod "ea9cbb40-f943-4242-96b5-de1c252ec7c1" (UID: "ea9cbb40-f943-4242-96b5-de1c252ec7c1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.204626 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.204662 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.204672 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttgzw\" (UniqueName: \"kubernetes.io/projected/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-kube-api-access-ttgzw\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.204684 4693 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.204695 4693 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.204702 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skfzw\" (UniqueName: \"kubernetes.io/projected/ea9cbb40-f943-4242-96b5-de1c252ec7c1-kube-api-access-skfzw\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.212313 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b09eb69e-639c-43c9-a70b-0f6089bdfa6c" (UID: "b09eb69e-639c-43c9-a70b-0f6089bdfa6c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.216404 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ea9cbb40-f943-4242-96b5-de1c252ec7c1" (UID: "ea9cbb40-f943-4242-96b5-de1c252ec7c1"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.222007 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b09eb69e-639c-43c9-a70b-0f6089bdfa6c" (UID: "b09eb69e-639c-43c9-a70b-0f6089bdfa6c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.226229 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ea9cbb40-f943-4242-96b5-de1c252ec7c1" (UID: "ea9cbb40-f943-4242-96b5-de1c252ec7c1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.227203 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ea9cbb40-f943-4242-96b5-de1c252ec7c1" (UID: "ea9cbb40-f943-4242-96b5-de1c252ec7c1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.237077 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-config" (OuterVolumeSpecName: "config") pod "b09eb69e-639c-43c9-a70b-0f6089bdfa6c" (UID: "b09eb69e-639c-43c9-a70b-0f6089bdfa6c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.305682 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.305951 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.305962 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.305969 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b09eb69e-639c-43c9-a70b-0f6089bdfa6c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.305977 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.305985 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea9cbb40-f943-4242-96b5-de1c252ec7c1-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.431868 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.452749 4693 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-597998b4bf-mrxcd"] Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.493765 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-748d7c6795-mlmk2"] Oct 08 07:33:20 crc kubenswrapper[4693]: E1008 07:33:20.494166 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b09eb69e-639c-43c9-a70b-0f6089bdfa6c" containerName="init" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.494178 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b09eb69e-639c-43c9-a70b-0f6089bdfa6c" containerName="init" Oct 08 07:33:20 crc kubenswrapper[4693]: E1008 07:33:20.494206 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea9cbb40-f943-4242-96b5-de1c252ec7c1" containerName="init" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.494212 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea9cbb40-f943-4242-96b5-de1c252ec7c1" containerName="init" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.494386 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea9cbb40-f943-4242-96b5-de1c252ec7c1" containerName="init" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.494395 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b09eb69e-639c-43c9-a70b-0f6089bdfa6c" containerName="init" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.495275 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.503300 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.559187 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-748d7c6795-mlmk2"] Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.562100 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" event={"ID":"ea9cbb40-f943-4242-96b5-de1c252ec7c1","Type":"ContainerDied","Data":"844200c0889160410464b05bc9e702bd7b6cc4f1ae1639dde0563f422f5aa8bf"} Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.562164 4693 scope.go:117] "RemoveContainer" containerID="73776fd2f458b852bd75ba21af119795dcc7661b70d1188a2fbb1477220dcd64" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.562349 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-mn4l6" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.571600 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-trdh5" event={"ID":"21be0841-c23b-4e2a-96dd-eebb788a1104","Type":"ContainerStarted","Data":"c6093f0600894f583cae174d5ee703161897f41507011bfd222d6d824898c7a1"} Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.573911 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-597998b4bf-mrxcd" event={"ID":"3ccf4c22-4fd6-4f83-90c3-2830af4f900f","Type":"ContainerStarted","Data":"d3c57b9a0cc2f7cd37dec8a68fe80b938f8986625037ada3732d528d08d7d5cc"} Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.576540 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" event={"ID":"b09eb69e-639c-43c9-a70b-0f6089bdfa6c","Type":"ContainerDied","Data":"d9d6bc73b984c95dd55df0afc2fa18742730fc1e5a369d0dd24d4301d3dcc025"} Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.576633 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-jwdtv" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.578080 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f85cc949-c985-4eaf-ad5a-be8936e8b4be","Type":"ContainerStarted","Data":"5e22efa897dac524a6684f63c0570eadf6c2bb3a5ceb742981133b7ecb17fc03"} Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.579495 4693 generic.go:334] "Generic (PLEG): container finished" podID="c34ee0aa-60c1-4dce-8a22-5415eb6da004" containerID="e22267284f4fcaa7c0087f73c53744f55292866a67c2c9416e44c2620a6a37dd" exitCode=0 Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.579929 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" event={"ID":"c34ee0aa-60c1-4dce-8a22-5415eb6da004","Type":"ContainerDied","Data":"e22267284f4fcaa7c0087f73c53744f55292866a67c2c9416e44c2620a6a37dd"} Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.599067 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-trdh5" podStartSLOduration=3.599048549 podStartE2EDuration="3.599048549s" podCreationTimestamp="2025-10-08 07:33:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:33:20.590716319 +0000 UTC m=+985.961681254" watchObservedRunningTime="2025-10-08 07:33:20.599048549 +0000 UTC m=+985.970013484" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.615472 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-scripts\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.615568 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-config-data\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.615589 4693 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-horizon-secret-key\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.615615 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z56f8\" (UniqueName: \"kubernetes.io/projected/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-kube-api-access-z56f8\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.615647 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-logs\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:20 crc kubenswrapper[4693]: I1008 07:33:20.693897 4693 scope.go:117] "RemoveContainer" containerID="dd85988013b7954ec944b827e97ac5d2142ad6defc20cfd811883e86241987e4" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.717421 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-logs\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.717656 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-scripts\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.717831 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-config-data\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.717850 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-horizon-secret-key\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.717870 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z56f8\" (UniqueName: \"kubernetes.io/projected/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-kube-api-access-z56f8\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.731301 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.731746 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-logs\") pod 
\"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.732280 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-scripts\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.739528 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-jwdtv"] Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.748471 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-horizon-secret-key\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.749179 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-jwdtv"] Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.753526 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-config-data\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.764466 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z56f8\" (UniqueName: \"kubernetes.io/projected/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-kube-api-access-z56f8\") pod \"horizon-748d7c6795-mlmk2\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.765169 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-mn4l6"] Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.771578 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-mn4l6"] Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:20.926354 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:21.036040 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:21.380018 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82bc033a-ffa3-4fd3-8134-2c438391384b" path="/var/lib/kubelet/pods/82bc033a-ffa3-4fd3-8134-2c438391384b/volumes" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:21.381128 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b09eb69e-639c-43c9-a70b-0f6089bdfa6c" path="/var/lib/kubelet/pods/b09eb69e-639c-43c9-a70b-0f6089bdfa6c/volumes" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:21.383795 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea9cbb40-f943-4242-96b5-de1c252ec7c1" path="/var/lib/kubelet/pods/ea9cbb40-f943-4242-96b5-de1c252ec7c1/volumes" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:21.621091 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb","Type":"ContainerStarted","Data":"a72b64f002dbaa6cbdb7d5834c224ed3b0f20257c79e7a68e1671be2ef0a14e6"} Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:21.626786 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" event={"ID":"c34ee0aa-60c1-4dce-8a22-5415eb6da004","Type":"ContainerStarted","Data":"fc3c598c4d37e5c2481815949bbebaec782b310b23a8009a7a37554fbc6f2a78"} Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:21.627121 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:21.632804 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f85cc949-c985-4eaf-ad5a-be8936e8b4be","Type":"ContainerStarted","Data":"ddf931381edfa2ec41c19ffeea420c0679bc749718ebbacb12ac413dfc95cac7"} Oct 08 07:33:21 crc kubenswrapper[4693]: I1008 07:33:21.650844 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" podStartSLOduration=3.650796569 podStartE2EDuration="3.650796569s" podCreationTimestamp="2025-10-08 07:33:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:33:21.645873819 +0000 UTC m=+987.016838764" watchObservedRunningTime="2025-10-08 07:33:21.650796569 +0000 UTC m=+987.021761504" Oct 08 07:33:22 crc kubenswrapper[4693]: I1008 07:33:22.063227 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-748d7c6795-mlmk2"] Oct 08 07:33:22 crc kubenswrapper[4693]: W1008 07:33:22.076601 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0794a6e_f5fe_4a0a_ae43_57e9bae8dbdb.slice/crio-70773bc51e9f6375f63f23da545a03b434516f5462e6edbd42b02df42de7578a WatchSource:0}: Error finding container 70773bc51e9f6375f63f23da545a03b434516f5462e6edbd42b02df42de7578a: Status 404 returned error can't find the container with id 70773bc51e9f6375f63f23da545a03b434516f5462e6edbd42b02df42de7578a Oct 08 07:33:22 crc kubenswrapper[4693]: I1008 07:33:22.644942 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"f85cc949-c985-4eaf-ad5a-be8936e8b4be","Type":"ContainerStarted","Data":"9bb9dc103f93446e8fb1064c329a53072f6a3a6dab6deacf47d85793e68b6918"} Oct 08 07:33:22 crc kubenswrapper[4693]: I1008 07:33:22.645269 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="f85cc949-c985-4eaf-ad5a-be8936e8b4be" containerName="glance-log" containerID="cri-o://ddf931381edfa2ec41c19ffeea420c0679bc749718ebbacb12ac413dfc95cac7" gracePeriod=30 Oct 08 07:33:22 crc kubenswrapper[4693]: I1008 07:33:22.645652 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="f85cc949-c985-4eaf-ad5a-be8936e8b4be" containerName="glance-httpd" containerID="cri-o://9bb9dc103f93446e8fb1064c329a53072f6a3a6dab6deacf47d85793e68b6918" gracePeriod=30 Oct 08 07:33:22 crc kubenswrapper[4693]: I1008 07:33:22.647558 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-748d7c6795-mlmk2" event={"ID":"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb","Type":"ContainerStarted","Data":"70773bc51e9f6375f63f23da545a03b434516f5462e6edbd42b02df42de7578a"} Oct 08 07:33:22 crc kubenswrapper[4693]: I1008 07:33:22.651471 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb","Type":"ContainerStarted","Data":"ffe574e9d177e952f5cb2173c94d5abbcfc9b243eef95d37d195aab863dac74b"} Oct 08 07:33:23 crc kubenswrapper[4693]: I1008 07:33:23.664289 4693 generic.go:334] "Generic (PLEG): container finished" podID="bac28fa6-fc14-4384-a5c2-969c7f422bbf" containerID="4b9738f5d8570ba3579097538ce1438277cc9dd0a3519d9dc7f2eb72c010df5c" exitCode=0 Oct 08 07:33:23 crc kubenswrapper[4693]: I1008 07:33:23.664349 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vgrz9" event={"ID":"bac28fa6-fc14-4384-a5c2-969c7f422bbf","Type":"ContainerDied","Data":"4b9738f5d8570ba3579097538ce1438277cc9dd0a3519d9dc7f2eb72c010df5c"} Oct 08 07:33:23 crc kubenswrapper[4693]: I1008 07:33:23.668063 4693 generic.go:334] "Generic (PLEG): container finished" podID="f85cc949-c985-4eaf-ad5a-be8936e8b4be" containerID="9bb9dc103f93446e8fb1064c329a53072f6a3a6dab6deacf47d85793e68b6918" exitCode=0 Oct 08 07:33:23 crc kubenswrapper[4693]: I1008 07:33:23.668090 4693 generic.go:334] "Generic (PLEG): container finished" podID="f85cc949-c985-4eaf-ad5a-be8936e8b4be" containerID="ddf931381edfa2ec41c19ffeea420c0679bc749718ebbacb12ac413dfc95cac7" exitCode=143 Oct 08 07:33:23 crc kubenswrapper[4693]: I1008 07:33:23.668110 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f85cc949-c985-4eaf-ad5a-be8936e8b4be","Type":"ContainerDied","Data":"9bb9dc103f93446e8fb1064c329a53072f6a3a6dab6deacf47d85793e68b6918"} Oct 08 07:33:23 crc kubenswrapper[4693]: I1008 07:33:23.668133 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f85cc949-c985-4eaf-ad5a-be8936e8b4be","Type":"ContainerDied","Data":"ddf931381edfa2ec41c19ffeea420c0679bc749718ebbacb12ac413dfc95cac7"} Oct 08 07:33:23 crc kubenswrapper[4693]: I1008 07:33:23.684422 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.684398914 podStartE2EDuration="5.684398914s" podCreationTimestamp="2025-10-08 07:33:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 
UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:33:22.664280609 +0000 UTC m=+988.035245564" watchObservedRunningTime="2025-10-08 07:33:23.684398914 +0000 UTC m=+989.055363859" Oct 08 07:33:24 crc kubenswrapper[4693]: I1008 07:33:24.684609 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb","Type":"ContainerStarted","Data":"26f7fd33ebf372415bbfe4f31de7020983156b14baccd40022a5bc5878755713"} Oct 08 07:33:24 crc kubenswrapper[4693]: I1008 07:33:24.684752 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" containerName="glance-log" containerID="cri-o://ffe574e9d177e952f5cb2173c94d5abbcfc9b243eef95d37d195aab863dac74b" gracePeriod=30 Oct 08 07:33:24 crc kubenswrapper[4693]: I1008 07:33:24.685044 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" containerName="glance-httpd" containerID="cri-o://26f7fd33ebf372415bbfe4f31de7020983156b14baccd40022a5bc5878755713" gracePeriod=30 Oct 08 07:33:24 crc kubenswrapper[4693]: I1008 07:33:24.718386 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.718370495 podStartE2EDuration="6.718370495s" podCreationTimestamp="2025-10-08 07:33:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:33:24.703487152 +0000 UTC m=+990.074452077" watchObservedRunningTime="2025-10-08 07:33:24.718370495 +0000 UTC m=+990.089335430" Oct 08 07:33:25 crc kubenswrapper[4693]: I1008 07:33:25.698695 4693 generic.go:334] "Generic (PLEG): container finished" podID="9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" containerID="26f7fd33ebf372415bbfe4f31de7020983156b14baccd40022a5bc5878755713" exitCode=0 Oct 08 07:33:25 crc kubenswrapper[4693]: I1008 07:33:25.699086 4693 generic.go:334] "Generic (PLEG): container finished" podID="9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" containerID="ffe574e9d177e952f5cb2173c94d5abbcfc9b243eef95d37d195aab863dac74b" exitCode=143 Oct 08 07:33:25 crc kubenswrapper[4693]: I1008 07:33:25.698860 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb","Type":"ContainerDied","Data":"26f7fd33ebf372415bbfe4f31de7020983156b14baccd40022a5bc5878755713"} Oct 08 07:33:25 crc kubenswrapper[4693]: I1008 07:33:25.699137 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb","Type":"ContainerDied","Data":"ffe574e9d177e952f5cb2173c94d5abbcfc9b243eef95d37d195aab863dac74b"} Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.533428 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.662225 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-config-data\") pod \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.662628 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-credential-keys\") pod \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.662733 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-combined-ca-bundle\") pod \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.662801 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kns79\" (UniqueName: \"kubernetes.io/projected/bac28fa6-fc14-4384-a5c2-969c7f422bbf-kube-api-access-kns79\") pod \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.662856 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-scripts\") pod \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.662874 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-fernet-keys\") pod \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\" (UID: \"bac28fa6-fc14-4384-a5c2-969c7f422bbf\") " Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.669008 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "bac28fa6-fc14-4384-a5c2-969c7f422bbf" (UID: "bac28fa6-fc14-4384-a5c2-969c7f422bbf"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.669055 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "bac28fa6-fc14-4384-a5c2-969c7f422bbf" (UID: "bac28fa6-fc14-4384-a5c2-969c7f422bbf"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.669166 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-scripts" (OuterVolumeSpecName: "scripts") pod "bac28fa6-fc14-4384-a5c2-969c7f422bbf" (UID: "bac28fa6-fc14-4384-a5c2-969c7f422bbf"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.672170 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bac28fa6-fc14-4384-a5c2-969c7f422bbf-kube-api-access-kns79" (OuterVolumeSpecName: "kube-api-access-kns79") pod "bac28fa6-fc14-4384-a5c2-969c7f422bbf" (UID: "bac28fa6-fc14-4384-a5c2-969c7f422bbf"). InnerVolumeSpecName "kube-api-access-kns79". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.697397 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bac28fa6-fc14-4384-a5c2-969c7f422bbf" (UID: "bac28fa6-fc14-4384-a5c2-969c7f422bbf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.698558 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-config-data" (OuterVolumeSpecName: "config-data") pod "bac28fa6-fc14-4384-a5c2-969c7f422bbf" (UID: "bac28fa6-fc14-4384-a5c2-969c7f422bbf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.710515 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vgrz9" event={"ID":"bac28fa6-fc14-4384-a5c2-969c7f422bbf","Type":"ContainerDied","Data":"5e9607d681e43d76015f727280b662bacaa3ea0cacf9d4347b4d15be595b93d2"} Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.710558 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e9607d681e43d76015f727280b662bacaa3ea0cacf9d4347b4d15be595b93d2" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.710638 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-vgrz9" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.765865 4693 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.765896 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.765908 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kns79\" (UniqueName: \"kubernetes.io/projected/bac28fa6-fc14-4384-a5c2-969c7f422bbf-kube-api-access-kns79\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.765919 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.765930 4693 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:26 crc kubenswrapper[4693]: I1008 07:33:26.765943 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bac28fa6-fc14-4384-a5c2-969c7f422bbf-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.664904 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-vgrz9"] Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.670760 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-vgrz9"] Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.763028 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7xn4v"] Oct 08 07:33:27 crc kubenswrapper[4693]: E1008 07:33:27.763384 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac28fa6-fc14-4384-a5c2-969c7f422bbf" containerName="keystone-bootstrap" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.763398 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac28fa6-fc14-4384-a5c2-969c7f422bbf" containerName="keystone-bootstrap" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.763560 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="bac28fa6-fc14-4384-a5c2-969c7f422bbf" containerName="keystone-bootstrap" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.764077 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.767566 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-nd6nt" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.768544 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.769343 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.770253 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.778133 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7xn4v"] Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.888011 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-combined-ca-bundle\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.888078 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr6rs\" (UniqueName: \"kubernetes.io/projected/04536603-9f35-460a-b169-a462d38283b9-kube-api-access-hr6rs\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.888122 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-scripts\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.888147 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-fernet-keys\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.888162 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-credential-keys\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.888183 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-config-data\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.989734 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-scripts\") pod \"keystone-bootstrap-7xn4v\" (UID: 
\"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.989794 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-fernet-keys\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.989834 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-credential-keys\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.989862 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-config-data\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.989923 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-combined-ca-bundle\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:27 crc kubenswrapper[4693]: I1008 07:33:27.989967 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr6rs\" (UniqueName: \"kubernetes.io/projected/04536603-9f35-460a-b169-a462d38283b9-kube-api-access-hr6rs\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:28 crc kubenswrapper[4693]: I1008 07:33:28.010852 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-scripts\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:28 crc kubenswrapper[4693]: I1008 07:33:28.011150 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-fernet-keys\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:28 crc kubenswrapper[4693]: I1008 07:33:28.013556 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-credential-keys\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:28 crc kubenswrapper[4693]: I1008 07:33:28.013621 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-config-data\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:28 crc kubenswrapper[4693]: I1008 07:33:28.024325 4693 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hr6rs\" (UniqueName: \"kubernetes.io/projected/04536603-9f35-460a-b169-a462d38283b9-kube-api-access-hr6rs\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:28 crc kubenswrapper[4693]: I1008 07:33:28.027387 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-combined-ca-bundle\") pod \"keystone-bootstrap-7xn4v\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") " pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:28 crc kubenswrapper[4693]: I1008 07:33:28.093479 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7xn4v" Oct 08 07:33:28 crc kubenswrapper[4693]: I1008 07:33:28.682695 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:33:28 crc kubenswrapper[4693]: I1008 07:33:28.753226 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-c6w9b"] Oct 08 07:33:28 crc kubenswrapper[4693]: I1008 07:33:28.753461 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-c6w9b" podUID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" containerName="dnsmasq-dns" containerID="cri-o://c4c49c3604b42c51ec78370f3bec626eef69ed2e477f8751170d9f819bcb6474" gracePeriod=10 Oct 08 07:33:29 crc kubenswrapper[4693]: I1008 07:33:29.394333 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bac28fa6-fc14-4384-a5c2-969c7f422bbf" path="/var/lib/kubelet/pods/bac28fa6-fc14-4384-a5c2-969c7f422bbf/volumes" Oct 08 07:33:29 crc kubenswrapper[4693]: I1008 07:33:29.785153 4693 generic.go:334] "Generic (PLEG): container finished" podID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" containerID="c4c49c3604b42c51ec78370f3bec626eef69ed2e477f8751170d9f819bcb6474" exitCode=0 Oct 08 07:33:29 crc kubenswrapper[4693]: I1008 07:33:29.785216 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-c6w9b" event={"ID":"e1409895-3bad-488c-a31c-2c4ed9b75d1c","Type":"ContainerDied","Data":"c4c49c3604b42c51ec78370f3bec626eef69ed2e477f8751170d9f819bcb6474"} Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.333070 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5c7456bbdf-llw58"] Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.386616 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-647ccf6b96-zrz9s"] Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.389886 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-647ccf6b96-zrz9s"] Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.390001 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.392732 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.463285 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-748d7c6795-mlmk2"] Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.474502 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-786b4cdb4-z6p8n"] Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.477414 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.494516 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-786b4cdb4-z6p8n"] Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.566138 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-c6w9b" podUID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579135 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f26734d-12eb-4c6c-9e68-254a30cea3b6-horizon-tls-certs\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579203 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bfzs\" (UniqueName: \"kubernetes.io/projected/1f26734d-12eb-4c6c-9e68-254a30cea3b6-kube-api-access-4bfzs\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579231 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1f26734d-12eb-4c6c-9e68-254a30cea3b6-horizon-secret-key\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579340 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv7qs\" (UniqueName: \"kubernetes.io/projected/4c13d244-5d68-4fdc-834e-90409425f7f4-kube-api-access-kv7qs\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579432 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c13d244-5d68-4fdc-834e-90409425f7f4-config-data\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579505 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c13d244-5d68-4fdc-834e-90409425f7f4-logs\") pod \"horizon-647ccf6b96-zrz9s\" (UID: 
\"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579530 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c13d244-5d68-4fdc-834e-90409425f7f4-scripts\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579582 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-combined-ca-bundle\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579635 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f26734d-12eb-4c6c-9e68-254a30cea3b6-config-data\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579714 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f26734d-12eb-4c6c-9e68-254a30cea3b6-logs\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579734 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-horizon-tls-certs\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579777 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f26734d-12eb-4c6c-9e68-254a30cea3b6-combined-ca-bundle\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579805 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f26734d-12eb-4c6c-9e68-254a30cea3b6-scripts\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.579835 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-horizon-secret-key\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681163 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1f26734d-12eb-4c6c-9e68-254a30cea3b6-horizon-secret-key\") pod \"horizon-786b4cdb4-z6p8n\" (UID: 
\"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681239 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv7qs\" (UniqueName: \"kubernetes.io/projected/4c13d244-5d68-4fdc-834e-90409425f7f4-kube-api-access-kv7qs\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681288 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c13d244-5d68-4fdc-834e-90409425f7f4-config-data\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681323 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c13d244-5d68-4fdc-834e-90409425f7f4-logs\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681346 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c13d244-5d68-4fdc-834e-90409425f7f4-scripts\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681374 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-combined-ca-bundle\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681411 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f26734d-12eb-4c6c-9e68-254a30cea3b6-config-data\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681460 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f26734d-12eb-4c6c-9e68-254a30cea3b6-logs\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681481 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-horizon-tls-certs\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681514 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f26734d-12eb-4c6c-9e68-254a30cea3b6-combined-ca-bundle\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681541 4693 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f26734d-12eb-4c6c-9e68-254a30cea3b6-scripts\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681563 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-horizon-secret-key\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681591 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f26734d-12eb-4c6c-9e68-254a30cea3b6-horizon-tls-certs\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.681639 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bfzs\" (UniqueName: \"kubernetes.io/projected/1f26734d-12eb-4c6c-9e68-254a30cea3b6-kube-api-access-4bfzs\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.682198 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f26734d-12eb-4c6c-9e68-254a30cea3b6-logs\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.682763 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f26734d-12eb-4c6c-9e68-254a30cea3b6-scripts\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.683477 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c13d244-5d68-4fdc-834e-90409425f7f4-config-data\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.683779 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c13d244-5d68-4fdc-834e-90409425f7f4-scripts\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.684049 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c13d244-5d68-4fdc-834e-90409425f7f4-logs\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.684976 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f26734d-12eb-4c6c-9e68-254a30cea3b6-config-data\") pod \"horizon-786b4cdb4-z6p8n\" (UID: 
\"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.688066 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f26734d-12eb-4c6c-9e68-254a30cea3b6-horizon-tls-certs\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.688731 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-horizon-secret-key\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.689370 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-combined-ca-bundle\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.690549 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-horizon-tls-certs\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.692056 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f26734d-12eb-4c6c-9e68-254a30cea3b6-combined-ca-bundle\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.698545 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1f26734d-12eb-4c6c-9e68-254a30cea3b6-horizon-secret-key\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.701533 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv7qs\" (UniqueName: \"kubernetes.io/projected/4c13d244-5d68-4fdc-834e-90409425f7f4-kube-api-access-kv7qs\") pod \"horizon-647ccf6b96-zrz9s\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.703139 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bfzs\" (UniqueName: \"kubernetes.io/projected/1f26734d-12eb-4c6c-9e68-254a30cea3b6-kube-api-access-4bfzs\") pod \"horizon-786b4cdb4-z6p8n\" (UID: \"1f26734d-12eb-4c6c-9e68-254a30cea3b6\") " pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.717213 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:33:31 crc kubenswrapper[4693]: I1008 07:33:31.801609 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:33:35 crc kubenswrapper[4693]: E1008 07:33:35.257312 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 08 07:33:35 crc kubenswrapper[4693]: E1008 07:33:35.257850 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5dch559h595h74h57h5fchd6h64bh66h5dch68bh66dh54bh75h7h5d7h557h57fh55hb6h699h5bhbbh76h56chc9h4h5b8hf4hbfh555h65dq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h7s6p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-597998b4bf-mrxcd_openstack(3ccf4c22-4fd6-4f83-90c3-2830af4f900f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 08 07:33:35 crc kubenswrapper[4693]: E1008 07:33:35.262343 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-597998b4bf-mrxcd" podUID="3ccf4c22-4fd6-4f83-90c3-2830af4f900f" Oct 08 07:33:36 crc kubenswrapper[4693]: I1008 07:33:36.564793 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-c6w9b" podUID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Oct 08 07:33:38 crc kubenswrapper[4693]: E1008 07:33:38.684128 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = 
copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Oct 08 07:33:38 crc kubenswrapper[4693]: E1008 07:33:38.684557 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vgg9b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-m4xlg_openstack(b4d301e9-d078-4876-a6a2-52a7c3b4dcbe): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 08 07:33:38 crc kubenswrapper[4693]: E1008 07:33:38.685806 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-m4xlg" podUID="b4d301e9-d078-4876-a6a2-52a7c3b4dcbe" Oct 08 07:33:38 crc kubenswrapper[4693]: E1008 07:33:38.724669 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 08 07:33:38 crc kubenswrapper[4693]: E1008 07:33:38.724956 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n94h657h658h66fh55fh567h695hb6h688h66h5f6h58bh584h76h6h695hd8h5f6h59bhbbhdbh55fh5ddhb8hdh694h564hc6h564h8h57bhbfq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zmk79,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-5c7456bbdf-llw58_openstack(8c65eaa0-7e61-4044-99d2-c61192d02cae): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 08 07:33:38 crc kubenswrapper[4693]: E1008 07:33:38.727793 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-5c7456bbdf-llw58" podUID="8c65eaa0-7e61-4044-99d2-c61192d02cae" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.813377 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.823780 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.876291 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-597998b4bf-mrxcd" event={"ID":"3ccf4c22-4fd6-4f83-90c3-2830af4f900f","Type":"ContainerDied","Data":"d3c57b9a0cc2f7cd37dec8a68fe80b938f8986625037ada3732d528d08d7d5cc"} Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.876392 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-597998b4bf-mrxcd" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.887088 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.887525 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f85cc949-c985-4eaf-ad5a-be8936e8b4be","Type":"ContainerDied","Data":"5e22efa897dac524a6684f63c0570eadf6c2bb3a5ceb742981133b7ecb17fc03"} Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.887598 4693 scope.go:117] "RemoveContainer" containerID="9bb9dc103f93446e8fb1064c329a53072f6a3a6dab6deacf47d85793e68b6918" Oct 08 07:33:38 crc kubenswrapper[4693]: E1008 07:33:38.888743 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-m4xlg" podUID="b4d301e9-d078-4876-a6a2-52a7c3b4dcbe" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.924784 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-logs\") pod \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.925015 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-config-data\") pod \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.925123 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-logs" (OuterVolumeSpecName: "logs") pod "3ccf4c22-4fd6-4f83-90c3-2830af4f900f" (UID: "3ccf4c22-4fd6-4f83-90c3-2830af4f900f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.925135 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7s6p\" (UniqueName: \"kubernetes.io/projected/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-kube-api-access-h7s6p\") pod \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.925254 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-scripts\") pod \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.925358 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-horizon-secret-key\") pod \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\" (UID: \"3ccf4c22-4fd6-4f83-90c3-2830af4f900f\") " Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.925912 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-config-data" (OuterVolumeSpecName: "config-data") pod "3ccf4c22-4fd6-4f83-90c3-2830af4f900f" (UID: "3ccf4c22-4fd6-4f83-90c3-2830af4f900f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.925926 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-scripts" (OuterVolumeSpecName: "scripts") pod "3ccf4c22-4fd6-4f83-90c3-2830af4f900f" (UID: "3ccf4c22-4fd6-4f83-90c3-2830af4f900f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.926243 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.926264 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.926273 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.937044 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-kube-api-access-h7s6p" (OuterVolumeSpecName: "kube-api-access-h7s6p") pod "3ccf4c22-4fd6-4f83-90c3-2830af4f900f" (UID: "3ccf4c22-4fd6-4f83-90c3-2830af4f900f"). InnerVolumeSpecName "kube-api-access-h7s6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:33:38 crc kubenswrapper[4693]: I1008 07:33:38.954075 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "3ccf4c22-4fd6-4f83-90c3-2830af4f900f" (UID: "3ccf4c22-4fd6-4f83-90c3-2830af4f900f"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.027306 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-combined-ca-bundle\") pod \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.027634 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.027671 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9469c\" (UniqueName: \"kubernetes.io/projected/f85cc949-c985-4eaf-ad5a-be8936e8b4be-kube-api-access-9469c\") pod \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.027741 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f85cc949-c985-4eaf-ad5a-be8936e8b4be-logs\") pod \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.027777 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-config-data\") pod \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.027835 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f85cc949-c985-4eaf-ad5a-be8936e8b4be-httpd-run\") pod \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.027861 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-scripts\") pod \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\" (UID: \"f85cc949-c985-4eaf-ad5a-be8936e8b4be\") " Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.028158 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7s6p\" (UniqueName: \"kubernetes.io/projected/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-kube-api-access-h7s6p\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.028174 4693 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3ccf4c22-4fd6-4f83-90c3-2830af4f900f-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.028295 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f85cc949-c985-4eaf-ad5a-be8936e8b4be-logs" (OuterVolumeSpecName: "logs") pod "f85cc949-c985-4eaf-ad5a-be8936e8b4be" (UID: "f85cc949-c985-4eaf-ad5a-be8936e8b4be"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.028479 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f85cc949-c985-4eaf-ad5a-be8936e8b4be-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f85cc949-c985-4eaf-ad5a-be8936e8b4be" (UID: "f85cc949-c985-4eaf-ad5a-be8936e8b4be"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.031245 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-scripts" (OuterVolumeSpecName: "scripts") pod "f85cc949-c985-4eaf-ad5a-be8936e8b4be" (UID: "f85cc949-c985-4eaf-ad5a-be8936e8b4be"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.034476 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "f85cc949-c985-4eaf-ad5a-be8936e8b4be" (UID: "f85cc949-c985-4eaf-ad5a-be8936e8b4be"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.034983 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f85cc949-c985-4eaf-ad5a-be8936e8b4be-kube-api-access-9469c" (OuterVolumeSpecName: "kube-api-access-9469c") pod "f85cc949-c985-4eaf-ad5a-be8936e8b4be" (UID: "f85cc949-c985-4eaf-ad5a-be8936e8b4be"). InnerVolumeSpecName "kube-api-access-9469c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.069510 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f85cc949-c985-4eaf-ad5a-be8936e8b4be" (UID: "f85cc949-c985-4eaf-ad5a-be8936e8b4be"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.099064 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-config-data" (OuterVolumeSpecName: "config-data") pod "f85cc949-c985-4eaf-ad5a-be8936e8b4be" (UID: "f85cc949-c985-4eaf-ad5a-be8936e8b4be"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.129262 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f85cc949-c985-4eaf-ad5a-be8936e8b4be-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.129296 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.129304 4693 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f85cc949-c985-4eaf-ad5a-be8936e8b4be-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.129313 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.129321 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f85cc949-c985-4eaf-ad5a-be8936e8b4be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.129351 4693 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.129363 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9469c\" (UniqueName: \"kubernetes.io/projected/f85cc949-c985-4eaf-ad5a-be8936e8b4be-kube-api-access-9469c\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.149768 4693 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.232486 4693 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.286225 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-597998b4bf-mrxcd"] Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.295667 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-597998b4bf-mrxcd"] Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.304243 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.310099 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.319174 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:33:39 crc kubenswrapper[4693]: E1008 07:33:39.319596 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85cc949-c985-4eaf-ad5a-be8936e8b4be" containerName="glance-httpd" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.319614 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85cc949-c985-4eaf-ad5a-be8936e8b4be" containerName="glance-httpd" 
Oct 08 07:33:39 crc kubenswrapper[4693]: E1008 07:33:39.319637 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85cc949-c985-4eaf-ad5a-be8936e8b4be" containerName="glance-log" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.319643 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85cc949-c985-4eaf-ad5a-be8936e8b4be" containerName="glance-log" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.319807 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85cc949-c985-4eaf-ad5a-be8936e8b4be" containerName="glance-httpd" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.319838 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85cc949-c985-4eaf-ad5a-be8936e8b4be" containerName="glance-log" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.320709 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.322608 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.322752 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.331935 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.375026 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ccf4c22-4fd6-4f83-90c3-2830af4f900f" path="/var/lib/kubelet/pods/3ccf4c22-4fd6-4f83-90c3-2830af4f900f/volumes" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.375713 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85cc949-c985-4eaf-ad5a-be8936e8b4be" path="/var/lib/kubelet/pods/f85cc949-c985-4eaf-ad5a-be8936e8b4be/volumes" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.436099 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/210e5669-78d1-47cb-8e46-e00c9764c2c7-logs\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.436149 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.436363 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-config-data\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.436490 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " 
pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.436578 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-scripts\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.436689 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtqw9\" (UniqueName: \"kubernetes.io/projected/210e5669-78d1-47cb-8e46-e00c9764c2c7-kube-api-access-jtqw9\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.436727 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.436761 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/210e5669-78d1-47cb-8e46-e00c9764c2c7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.538387 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-config-data\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.538458 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.538497 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-scripts\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.538527 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtqw9\" (UniqueName: \"kubernetes.io/projected/210e5669-78d1-47cb-8e46-e00c9764c2c7-kube-api-access-jtqw9\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.538558 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: 
\"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.538578 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/210e5669-78d1-47cb-8e46-e00c9764c2c7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.538619 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/210e5669-78d1-47cb-8e46-e00c9764c2c7-logs\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.538645 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.538758 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.539683 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/210e5669-78d1-47cb-8e46-e00c9764c2c7-logs\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.539704 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/210e5669-78d1-47cb-8e46-e00c9764c2c7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.543238 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.543267 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-scripts\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.543701 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-config-data\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.548461 4693 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.557568 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtqw9\" (UniqueName: \"kubernetes.io/projected/210e5669-78d1-47cb-8e46-e00c9764c2c7-kube-api-access-jtqw9\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.566528 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " pod="openstack/glance-default-external-api-0" Oct 08 07:33:39 crc kubenswrapper[4693]: I1008 07:33:39.647299 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 08 07:33:41 crc kubenswrapper[4693]: I1008 07:33:41.565011 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-c6w9b" podUID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Oct 08 07:33:41 crc kubenswrapper[4693]: I1008 07:33:41.566083 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:33:41 crc kubenswrapper[4693]: E1008 07:33:41.771629 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Oct 08 07:33:41 crc kubenswrapper[4693]: E1008 07:33:41.771919 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xcpj6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-rxtmf_openstack(79884c8c-f689-46b7-9223-66bd0b7bff8e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 08 07:33:41 crc kubenswrapper[4693]: E1008 07:33:41.773558 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-rxtmf" podUID="79884c8c-f689-46b7-9223-66bd0b7bff8e" Oct 08 07:33:41 crc kubenswrapper[4693]: E1008 07:33:41.924845 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-rxtmf" podUID="79884c8c-f689-46b7-9223-66bd0b7bff8e" Oct 08 07:33:46 crc kubenswrapper[4693]: I1008 07:33:46.969515 4693 generic.go:334] "Generic (PLEG): container finished" podID="21be0841-c23b-4e2a-96dd-eebb788a1104" containerID="c6093f0600894f583cae174d5ee703161897f41507011bfd222d6d824898c7a1" exitCode=0 Oct 08 07:33:46 crc kubenswrapper[4693]: I1008 07:33:46.970387 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-trdh5" event={"ID":"21be0841-c23b-4e2a-96dd-eebb788a1104","Type":"ContainerDied","Data":"c6093f0600894f583cae174d5ee703161897f41507011bfd222d6d824898c7a1"} Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.606800 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.624213 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.744668 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmk79\" (UniqueName: \"kubernetes.io/projected/8c65eaa0-7e61-4044-99d2-c61192d02cae-kube-api-access-zmk79\") pod \"8c65eaa0-7e61-4044-99d2-c61192d02cae\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.744729 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8c65eaa0-7e61-4044-99d2-c61192d02cae-horizon-secret-key\") pod \"8c65eaa0-7e61-4044-99d2-c61192d02cae\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.744778 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-config-data\") pod \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.744800 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.744855 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8f5d2\" (UniqueName: \"kubernetes.io/projected/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-kube-api-access-8f5d2\") pod \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.744908 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-combined-ca-bundle\") pod \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.744944 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c65eaa0-7e61-4044-99d2-c61192d02cae-logs\") pod \"8c65eaa0-7e61-4044-99d2-c61192d02cae\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.744994 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c65eaa0-7e61-4044-99d2-c61192d02cae-scripts\") pod \"8c65eaa0-7e61-4044-99d2-c61192d02cae\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.745022 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-logs\") pod \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.745042 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-scripts\") pod \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " Oct 08 07:33:49 crc 
kubenswrapper[4693]: I1008 07:33:49.745120 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8c65eaa0-7e61-4044-99d2-c61192d02cae-config-data\") pod \"8c65eaa0-7e61-4044-99d2-c61192d02cae\" (UID: \"8c65eaa0-7e61-4044-99d2-c61192d02cae\") " Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.745149 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-httpd-run\") pod \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\" (UID: \"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb\") " Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.745555 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c65eaa0-7e61-4044-99d2-c61192d02cae-logs" (OuterVolumeSpecName: "logs") pod "8c65eaa0-7e61-4044-99d2-c61192d02cae" (UID: "8c65eaa0-7e61-4044-99d2-c61192d02cae"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.746098 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-logs" (OuterVolumeSpecName: "logs") pod "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" (UID: "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.746858 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c65eaa0-7e61-4044-99d2-c61192d02cae-scripts" (OuterVolumeSpecName: "scripts") pod "8c65eaa0-7e61-4044-99d2-c61192d02cae" (UID: "8c65eaa0-7e61-4044-99d2-c61192d02cae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.746886 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c65eaa0-7e61-4044-99d2-c61192d02cae-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.746905 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.746988 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c65eaa0-7e61-4044-99d2-c61192d02cae-config-data" (OuterVolumeSpecName: "config-data") pod "8c65eaa0-7e61-4044-99d2-c61192d02cae" (UID: "8c65eaa0-7e61-4044-99d2-c61192d02cae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.747538 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" (UID: "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.752925 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c65eaa0-7e61-4044-99d2-c61192d02cae-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "8c65eaa0-7e61-4044-99d2-c61192d02cae" (UID: "8c65eaa0-7e61-4044-99d2-c61192d02cae"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.754396 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" (UID: "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.755632 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-kube-api-access-8f5d2" (OuterVolumeSpecName: "kube-api-access-8f5d2") pod "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" (UID: "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb"). InnerVolumeSpecName "kube-api-access-8f5d2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.758029 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c65eaa0-7e61-4044-99d2-c61192d02cae-kube-api-access-zmk79" (OuterVolumeSpecName: "kube-api-access-zmk79") pod "8c65eaa0-7e61-4044-99d2-c61192d02cae" (UID: "8c65eaa0-7e61-4044-99d2-c61192d02cae"). InnerVolumeSpecName "kube-api-access-zmk79". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.766944 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-scripts" (OuterVolumeSpecName: "scripts") pod "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" (UID: "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.786731 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" (UID: "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.796151 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-config-data" (OuterVolumeSpecName: "config-data") pod "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" (UID: "9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.849722 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8c65eaa0-7e61-4044-99d2-c61192d02cae-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.849748 4693 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.849758 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmk79\" (UniqueName: \"kubernetes.io/projected/8c65eaa0-7e61-4044-99d2-c61192d02cae-kube-api-access-zmk79\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.849767 4693 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8c65eaa0-7e61-4044-99d2-c61192d02cae-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.849775 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.849803 4693 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.849817 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8f5d2\" (UniqueName: \"kubernetes.io/projected/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-kube-api-access-8f5d2\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.849843 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.849854 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c65eaa0-7e61-4044-99d2-c61192d02cae-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.849862 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.867765 4693 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.951004 4693 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:49 crc kubenswrapper[4693]: E1008 07:33:49.982430 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Oct 08 07:33:49 crc kubenswrapper[4693]: E1008 07:33:49.982585 4693 kuberuntime_manager.go:1274] 
"Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nb4h546h59bh69hbdh7dh688h658h5h5d8hcchc5h665h5dfh586h5f7h5c4hffh5b6h56dh54h5f6hd5hf4h596h54bh654h8bh59fh74h64fh679q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b6ff9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(019964d2-ae2a-4501-ad53-c6c0b2260a32): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 08 07:33:49 crc kubenswrapper[4693]: I1008 07:33:49.991737 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.005295 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.006118 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb","Type":"ContainerDied","Data":"a72b64f002dbaa6cbdb7d5834c224ed3b0f20257c79e7a68e1671be2ef0a14e6"} Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.006125 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.023871 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-c6w9b" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.024266 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-c6w9b" event={"ID":"e1409895-3bad-488c-a31c-2c4ed9b75d1c","Type":"ContainerDied","Data":"26507a9e77794d1a4247652f7376a15f18d536e2037432b292ce555c208b2a86"} Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.027678 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-trdh5" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.027724 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-trdh5" event={"ID":"21be0841-c23b-4e2a-96dd-eebb788a1104","Type":"ContainerDied","Data":"0cb5334842add8d97255687a0e6acde820058235b5ba8c80853a737971839a14"} Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.027762 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0cb5334842add8d97255687a0e6acde820058235b5ba8c80853a737971839a14" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.029504 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c7456bbdf-llw58" event={"ID":"8c65eaa0-7e61-4044-99d2-c61192d02cae","Type":"ContainerDied","Data":"30f3beeca361f2f330453bf566191104dd7cd5d4efa2406d18591e388be289f5"} Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.029555 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5c7456bbdf-llw58" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.100034 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.108001 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.121975 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:33:50 crc kubenswrapper[4693]: E1008 07:33:50.122476 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" containerName="glance-httpd" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.122511 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" containerName="glance-httpd" Oct 08 07:33:50 crc kubenswrapper[4693]: E1008 07:33:50.122525 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" containerName="init" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.122533 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" containerName="init" Oct 08 07:33:50 crc kubenswrapper[4693]: E1008 07:33:50.122547 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" containerName="glance-log" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.122553 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" containerName="glance-log" Oct 08 07:33:50 crc kubenswrapper[4693]: E1008 07:33:50.122623 4693 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="21be0841-c23b-4e2a-96dd-eebb788a1104" containerName="neutron-db-sync" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.122630 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="21be0841-c23b-4e2a-96dd-eebb788a1104" containerName="neutron-db-sync" Oct 08 07:33:50 crc kubenswrapper[4693]: E1008 07:33:50.122653 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" containerName="dnsmasq-dns" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.122659 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" containerName="dnsmasq-dns" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.122888 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" containerName="glance-log" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.122930 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" containerName="glance-httpd" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.122941 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" containerName="dnsmasq-dns" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.122952 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="21be0841-c23b-4e2a-96dd-eebb788a1104" containerName="neutron-db-sync" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.126853 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.133470 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.136365 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.136486 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.159228 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/21be0841-c23b-4e2a-96dd-eebb788a1104-config\") pod \"21be0841-c23b-4e2a-96dd-eebb788a1104\" (UID: \"21be0841-c23b-4e2a-96dd-eebb788a1104\") " Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.159283 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-config\") pod \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.159330 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21be0841-c23b-4e2a-96dd-eebb788a1104-combined-ca-bundle\") pod \"21be0841-c23b-4e2a-96dd-eebb788a1104\" (UID: \"21be0841-c23b-4e2a-96dd-eebb788a1104\") " Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.159449 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qr9sh\" (UniqueName: \"kubernetes.io/projected/e1409895-3bad-488c-a31c-2c4ed9b75d1c-kube-api-access-qr9sh\") pod \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\" (UID: 
\"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.159535 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-ovsdbserver-nb\") pod \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.159566 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-dns-svc\") pod \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.159594 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4zcz\" (UniqueName: \"kubernetes.io/projected/21be0841-c23b-4e2a-96dd-eebb788a1104-kube-api-access-l4zcz\") pod \"21be0841-c23b-4e2a-96dd-eebb788a1104\" (UID: \"21be0841-c23b-4e2a-96dd-eebb788a1104\") " Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.159629 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-ovsdbserver-sb\") pod \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\" (UID: \"e1409895-3bad-488c-a31c-2c4ed9b75d1c\") " Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.179328 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5c7456bbdf-llw58"] Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.183582 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21be0841-c23b-4e2a-96dd-eebb788a1104-config" (OuterVolumeSpecName: "config") pod "21be0841-c23b-4e2a-96dd-eebb788a1104" (UID: "21be0841-c23b-4e2a-96dd-eebb788a1104"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.184108 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21be0841-c23b-4e2a-96dd-eebb788a1104-kube-api-access-l4zcz" (OuterVolumeSpecName: "kube-api-access-l4zcz") pod "21be0841-c23b-4e2a-96dd-eebb788a1104" (UID: "21be0841-c23b-4e2a-96dd-eebb788a1104"). InnerVolumeSpecName "kube-api-access-l4zcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.186872 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1409895-3bad-488c-a31c-2c4ed9b75d1c-kube-api-access-qr9sh" (OuterVolumeSpecName: "kube-api-access-qr9sh") pod "e1409895-3bad-488c-a31c-2c4ed9b75d1c" (UID: "e1409895-3bad-488c-a31c-2c4ed9b75d1c"). InnerVolumeSpecName "kube-api-access-qr9sh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.188242 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5c7456bbdf-llw58"] Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.217667 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21be0841-c23b-4e2a-96dd-eebb788a1104-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21be0841-c23b-4e2a-96dd-eebb788a1104" (UID: "21be0841-c23b-4e2a-96dd-eebb788a1104"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.219273 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-config" (OuterVolumeSpecName: "config") pod "e1409895-3bad-488c-a31c-2c4ed9b75d1c" (UID: "e1409895-3bad-488c-a31c-2c4ed9b75d1c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.226858 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e1409895-3bad-488c-a31c-2c4ed9b75d1c" (UID: "e1409895-3bad-488c-a31c-2c4ed9b75d1c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.237414 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e1409895-3bad-488c-a31c-2c4ed9b75d1c" (UID: "e1409895-3bad-488c-a31c-2c4ed9b75d1c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.241837 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e1409895-3bad-488c-a31c-2c4ed9b75d1c" (UID: "e1409895-3bad-488c-a31c-2c4ed9b75d1c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.262912 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-logs\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.262978 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ks6z\" (UniqueName: \"kubernetes.io/projected/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-kube-api-access-5ks6z\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263050 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263087 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263106 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263138 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263160 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263360 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263580 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263663 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263681 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4zcz\" (UniqueName: \"kubernetes.io/projected/21be0841-c23b-4e2a-96dd-eebb788a1104-kube-api-access-l4zcz\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263694 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263704 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/21be0841-c23b-4e2a-96dd-eebb788a1104-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263715 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1409895-3bad-488c-a31c-2c4ed9b75d1c-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263726 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21be0841-c23b-4e2a-96dd-eebb788a1104-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.263736 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qr9sh\" (UniqueName: \"kubernetes.io/projected/e1409895-3bad-488c-a31c-2c4ed9b75d1c-kube-api-access-qr9sh\") on node 
\"crc\" DevicePath \"\"" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.359326 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-c6w9b"] Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.364711 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.364763 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-logs\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.364797 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ks6z\" (UniqueName: \"kubernetes.io/projected/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-kube-api-access-5ks6z\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.364929 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.364973 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.364996 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.365035 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.365057 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.365158 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-logs\") pod \"glance-default-internal-api-0\" (UID: 
\"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.364978 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.365383 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.365423 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-c6w9b"] Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.368744 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.369983 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.370929 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.373654 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.382579 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ks6z\" (UniqueName: \"kubernetes.io/projected/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-kube-api-access-5ks6z\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.388025 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:33:50 crc kubenswrapper[4693]: I1008 07:33:50.461283 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.277232 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-k8rpf"] Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.279347 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.304244 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-k8rpf"] Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.386561 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.386596 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-274cl\" (UniqueName: \"kubernetes.io/projected/9ec230f9-e0bd-4760-aeef-08d63ef6b795-kube-api-access-274cl\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.386676 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-dns-svc\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.386693 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.386745 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.386762 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-config\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.392817 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c65eaa0-7e61-4044-99d2-c61192d02cae" path="/var/lib/kubelet/pods/8c65eaa0-7e61-4044-99d2-c61192d02cae/volumes" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.400549 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb" path="/var/lib/kubelet/pods/9d6ffe39-0b01-40e4-aaee-8cdab02e2bbb/volumes" Oct 08 07:33:51 crc 
kubenswrapper[4693]: I1008 07:33:51.401516 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" path="/var/lib/kubelet/pods/e1409895-3bad-488c-a31c-2c4ed9b75d1c/volumes" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.412973 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7fd7c44d-xwrpr"] Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.413584 4693 scope.go:117] "RemoveContainer" containerID="ddf931381edfa2ec41c19ffeea420c0679bc749718ebbacb12ac413dfc95cac7" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.414563 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.419501 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-g7qwr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.419631 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.420074 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.420176 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.431601 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7fd7c44d-xwrpr"] Oct 08 07:33:51 crc kubenswrapper[4693]: E1008 07:33:51.454745 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Oct 08 07:33:51 crc kubenswrapper[4693]: E1008 07:33:51.454913 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gdmgt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-r7p2j_openstack(e1bdcd99-a53d-45ee-b439-57c0e0025fb9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 08 07:33:51 crc kubenswrapper[4693]: E1008 07:33:51.456108 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-r7p2j" podUID="e1bdcd99-a53d-45ee-b439-57c0e0025fb9" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.488547 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.488951 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-config\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 
07:33:51.489118 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-combined-ca-bundle\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.489156 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.489178 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-274cl\" (UniqueName: \"kubernetes.io/projected/9ec230f9-e0bd-4760-aeef-08d63ef6b795-kube-api-access-274cl\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.489207 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjd2c\" (UniqueName: \"kubernetes.io/projected/3a935819-12b0-495b-944b-d74e091f176c-kube-api-access-sjd2c\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.489299 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-httpd-config\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.489350 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-ovndb-tls-certs\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.489423 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-config\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.489479 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-dns-svc\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.489497 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.489563 
4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.489677 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-config\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.490522 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.490879 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.491766 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-dns-svc\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.516411 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-274cl\" (UniqueName: \"kubernetes.io/projected/9ec230f9-e0bd-4760-aeef-08d63ef6b795-kube-api-access-274cl\") pod \"dnsmasq-dns-55f844cf75-k8rpf\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.546638 4693 scope.go:117] "RemoveContainer" containerID="26f7fd33ebf372415bbfe4f31de7020983156b14baccd40022a5bc5878755713" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.564502 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-c6w9b" podUID="e1409895-3bad-488c-a31c-2c4ed9b75d1c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: i/o timeout" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.591571 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-httpd-config\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.591618 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-ovndb-tls-certs\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.591644 4693 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-config\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.591738 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-combined-ca-bundle\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.591762 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjd2c\" (UniqueName: \"kubernetes.io/projected/3a935819-12b0-495b-944b-d74e091f176c-kube-api-access-sjd2c\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.597564 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-ovndb-tls-certs\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.598259 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-httpd-config\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.598940 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-config\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.599750 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-combined-ca-bundle\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.608938 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.612932 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjd2c\" (UniqueName: \"kubernetes.io/projected/3a935819-12b0-495b-944b-d74e091f176c-kube-api-access-sjd2c\") pod \"neutron-7fd7c44d-xwrpr\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.704362 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.712418 4693 scope.go:117] "RemoveContainer" containerID="ffe574e9d177e952f5cb2173c94d5abbcfc9b243eef95d37d195aab863dac74b" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.759037 4693 scope.go:117] "RemoveContainer" containerID="c4c49c3604b42c51ec78370f3bec626eef69ed2e477f8751170d9f819bcb6474" Oct 08 07:33:51 crc kubenswrapper[4693]: I1008 07:33:51.832607 4693 scope.go:117] "RemoveContainer" containerID="03c3df716366d99c8ccfa215ed3d84375ccf8bbcf5951bbfc75b536283b119b5" Oct 08 07:33:52 crc kubenswrapper[4693]: I1008 07:33:52.050116 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7xn4v"] Oct 08 07:33:52 crc kubenswrapper[4693]: I1008 07:33:52.057057 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-748d7c6795-mlmk2" event={"ID":"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb","Type":"ContainerStarted","Data":"f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245"} Oct 08 07:33:52 crc kubenswrapper[4693]: I1008 07:33:52.064940 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-647ccf6b96-zrz9s"] Oct 08 07:33:52 crc kubenswrapper[4693]: E1008 07:33:52.085451 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-r7p2j" podUID="e1bdcd99-a53d-45ee-b439-57c0e0025fb9" Oct 08 07:33:52 crc kubenswrapper[4693]: I1008 07:33:52.155168 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-786b4cdb4-z6p8n"] Oct 08 07:33:52 crc kubenswrapper[4693]: W1008 07:33:52.164080 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f26734d_12eb_4c6c_9e68_254a30cea3b6.slice/crio-2156eb6310c90c4c918054bfca16898ca1517acfc7eb4777d35b6ec1f9977e51 WatchSource:0}: Error finding container 2156eb6310c90c4c918054bfca16898ca1517acfc7eb4777d35b6ec1f9977e51: Status 404 returned error can't find the container with id 2156eb6310c90c4c918054bfca16898ca1517acfc7eb4777d35b6ec1f9977e51 Oct 08 07:33:52 crc kubenswrapper[4693]: I1008 07:33:52.221673 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:33:52 crc kubenswrapper[4693]: I1008 07:33:52.336368 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-k8rpf"] Oct 08 07:33:52 crc kubenswrapper[4693]: I1008 07:33:52.432534 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7fd7c44d-xwrpr"] Oct 08 07:33:52 crc kubenswrapper[4693]: W1008 07:33:52.543386 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ec230f9_e0bd_4760_aeef_08d63ef6b795.slice/crio-5bfe6ac4dcc612359f9b1efc021941edd205ed9e16410e97e5cb63e527810a37 WatchSource:0}: Error finding container 5bfe6ac4dcc612359f9b1efc021941edd205ed9e16410e97e5cb63e527810a37: Status 404 returned error can't find the container with id 5bfe6ac4dcc612359f9b1efc021941edd205ed9e16410e97e5cb63e527810a37 Oct 08 07:33:52 crc kubenswrapper[4693]: W1008 07:33:52.546303 4693 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a935819_12b0_495b_944b_d74e091f176c.slice/crio-a2f09f509a1f529623ea01907dc482912f637f8e3da1baa23a33c51cf1c7677a WatchSource:0}: Error finding container a2f09f509a1f529623ea01907dc482912f637f8e3da1baa23a33c51cf1c7677a: Status 404 returned error can't find the container with id a2f09f509a1f529623ea01907dc482912f637f8e3da1baa23a33c51cf1c7677a Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.092765 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fd7c44d-xwrpr" event={"ID":"3a935819-12b0-495b-944b-d74e091f176c","Type":"ContainerStarted","Data":"94146f4bfca7747b4b942bfe8395949b5670d17a4eec6657e0f6afb104e9df10"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.093558 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fd7c44d-xwrpr" event={"ID":"3a935819-12b0-495b-944b-d74e091f176c","Type":"ContainerStarted","Data":"a2f09f509a1f529623ea01907dc482912f637f8e3da1baa23a33c51cf1c7677a"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.094353 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-786b4cdb4-z6p8n" event={"ID":"1f26734d-12eb-4c6c-9e68-254a30cea3b6","Type":"ContainerStarted","Data":"c0a09bc510b334009c920bb57ea1df562a7d15a6ff85e454784fd542f35e6644"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.094379 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-786b4cdb4-z6p8n" event={"ID":"1f26734d-12eb-4c6c-9e68-254a30cea3b6","Type":"ContainerStarted","Data":"2156eb6310c90c4c918054bfca16898ca1517acfc7eb4777d35b6ec1f9977e51"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.096470 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-748d7c6795-mlmk2" event={"ID":"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb","Type":"ContainerStarted","Data":"81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.096590 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-748d7c6795-mlmk2" podUID="a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" containerName="horizon-log" containerID="cri-o://f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245" gracePeriod=30 Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.097061 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-748d7c6795-mlmk2" podUID="a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" containerName="horizon" containerID="cri-o://81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba" gracePeriod=30 Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.102467 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7xn4v" event={"ID":"04536603-9f35-460a-b169-a462d38283b9","Type":"ContainerStarted","Data":"db6f7cd08db4690295e1fa61dc78bec7b1189a82f31b277a7de282b99972fe63"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.102744 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7xn4v" event={"ID":"04536603-9f35-460a-b169-a462d38283b9","Type":"ContainerStarted","Data":"230ffea816a224f28365615325641173a01746bc3a19f505895d358c648b6532"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.112123 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-647ccf6b96-zrz9s" 
event={"ID":"4c13d244-5d68-4fdc-834e-90409425f7f4","Type":"ContainerStarted","Data":"1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.112157 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-647ccf6b96-zrz9s" event={"ID":"4c13d244-5d68-4fdc-834e-90409425f7f4","Type":"ContainerStarted","Data":"fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.112167 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-647ccf6b96-zrz9s" event={"ID":"4c13d244-5d68-4fdc-834e-90409425f7f4","Type":"ContainerStarted","Data":"58bc6c46396681ef02b318afaae45c284ed34c9123dd771ca58364d7c3a4722e"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.114505 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"210e5669-78d1-47cb-8e46-e00c9764c2c7","Type":"ContainerStarted","Data":"a65371312968a2f31bd9aa6d30c2f19e9fc3505841829c52c1c53ac32cb7c3c1"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.118743 4693 generic.go:334] "Generic (PLEG): container finished" podID="9ec230f9-e0bd-4760-aeef-08d63ef6b795" containerID="4719a002be72b0ac919a4629317471dd385acad5e34152d64947d9652de199a6" exitCode=0 Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.118800 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" event={"ID":"9ec230f9-e0bd-4760-aeef-08d63ef6b795","Type":"ContainerDied","Data":"4719a002be72b0ac919a4629317471dd385acad5e34152d64947d9652de199a6"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.118842 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" event={"ID":"9ec230f9-e0bd-4760-aeef-08d63ef6b795","Type":"ContainerStarted","Data":"5bfe6ac4dcc612359f9b1efc021941edd205ed9e16410e97e5cb63e527810a37"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.126160 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-748d7c6795-mlmk2" podStartSLOduration=3.859075121 podStartE2EDuration="33.126145373s" podCreationTimestamp="2025-10-08 07:33:20 +0000 UTC" firstStartedPulling="2025-10-08 07:33:22.078448418 +0000 UTC m=+987.449413353" lastFinishedPulling="2025-10-08 07:33:51.34551867 +0000 UTC m=+1016.716483605" observedRunningTime="2025-10-08 07:33:53.125461315 +0000 UTC m=+1018.496426250" watchObservedRunningTime="2025-10-08 07:33:53.126145373 +0000 UTC m=+1018.497110318" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.132761 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"019964d2-ae2a-4501-ad53-c6c0b2260a32","Type":"ContainerStarted","Data":"2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a"} Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.140715 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7xn4v" podStartSLOduration=26.140699346 podStartE2EDuration="26.140699346s" podCreationTimestamp="2025-10-08 07:33:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:33:53.138852128 +0000 UTC m=+1018.509817063" watchObservedRunningTime="2025-10-08 07:33:53.140699346 +0000 UTC m=+1018.511664281" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.318596 4693 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-647ccf6b96-zrz9s" podStartSLOduration=22.318576028 podStartE2EDuration="22.318576028s" podCreationTimestamp="2025-10-08 07:33:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:33:53.179405897 +0000 UTC m=+1018.550370832" watchObservedRunningTime="2025-10-08 07:33:53.318576028 +0000 UTC m=+1018.689540963" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.334908 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.574515 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-77bfdd5769-m42ll"] Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.576058 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.582167 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.582327 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.584495 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-77bfdd5769-m42ll"] Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.761139 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-config\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.761215 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-httpd-config\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.761237 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-public-tls-certs\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.761270 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-internal-tls-certs\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.761296 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-combined-ca-bundle\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.761313 4693 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-ovndb-tls-certs\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.761343 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdlkx\" (UniqueName: \"kubernetes.io/projected/e8c282e4-9865-41ec-922f-86d322b60ea0-kube-api-access-rdlkx\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.865781 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-config\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.865872 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-httpd-config\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.865891 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-public-tls-certs\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.865924 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-internal-tls-certs\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.865949 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-combined-ca-bundle\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.865966 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-ovndb-tls-certs\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.865993 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdlkx\" (UniqueName: \"kubernetes.io/projected/e8c282e4-9865-41ec-922f-86d322b60ea0-kube-api-access-rdlkx\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.886304 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-combined-ca-bundle\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.886575 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-ovndb-tls-certs\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.889493 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdlkx\" (UniqueName: \"kubernetes.io/projected/e8c282e4-9865-41ec-922f-86d322b60ea0-kube-api-access-rdlkx\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.890322 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-config\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.891385 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-internal-tls-certs\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.894399 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-public-tls-certs\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:53 crc kubenswrapper[4693]: I1008 07:33:53.922451 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e8c282e4-9865-41ec-922f-86d322b60ea0-httpd-config\") pod \"neutron-77bfdd5769-m42ll\" (UID: \"e8c282e4-9865-41ec-922f-86d322b60ea0\") " pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:53.999832 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.152716 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-786b4cdb4-z6p8n" event={"ID":"1f26734d-12eb-4c6c-9e68-254a30cea3b6","Type":"ContainerStarted","Data":"8a97d7bc206ce3d65b0ecad791b70ed67c3f16fef2c6bae9b6e12f3d4d1dc49b"} Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.156045 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"de2f6c37-8e32-4276-bbe3-9f5c404a18b3","Type":"ContainerStarted","Data":"8f63e789397696ce043caf6fe805789004d736bbfd8b47f4e1f34949eb4da4ac"} Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.159756 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"210e5669-78d1-47cb-8e46-e00c9764c2c7","Type":"ContainerStarted","Data":"20b8b158dd96d34de556d3d25ca53f3eccd970618c42b36bcdf3301c244bb29e"} Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.174597 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-rxtmf" event={"ID":"79884c8c-f689-46b7-9223-66bd0b7bff8e","Type":"ContainerStarted","Data":"dc4f68dded1f91f34c7f0e69513d47d93be66115785ca34b370b996156a1dcda"} Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.183234 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" event={"ID":"9ec230f9-e0bd-4760-aeef-08d63ef6b795","Type":"ContainerStarted","Data":"23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414"} Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.183522 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.189892 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fd7c44d-xwrpr" event={"ID":"3a935819-12b0-495b-944b-d74e091f176c","Type":"ContainerStarted","Data":"75e0792a492c6155b18866c1c8b8c7919f159e31e7a4b2391a17fe20ee1f2010"} Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.190363 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.191592 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-786b4cdb4-z6p8n" podStartSLOduration=23.191579283 podStartE2EDuration="23.191579283s" podCreationTimestamp="2025-10-08 07:33:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:33:54.175608792 +0000 UTC m=+1019.546573727" watchObservedRunningTime="2025-10-08 07:33:54.191579283 +0000 UTC m=+1019.562544218" Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.195667 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-m4xlg" event={"ID":"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe","Type":"ContainerStarted","Data":"fbfe4123cc4ff84eda46dc37b6caca960cdd19adbd16833385533ac145451e09"} Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.199661 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-rxtmf" podStartSLOduration=3.448209591 podStartE2EDuration="37.199645766s" podCreationTimestamp="2025-10-08 07:33:17 +0000 UTC" firstStartedPulling="2025-10-08 07:33:19.135896089 +0000 UTC m=+984.506861024" 
lastFinishedPulling="2025-10-08 07:33:52.887332264 +0000 UTC m=+1018.258297199" observedRunningTime="2025-10-08 07:33:54.190401362 +0000 UTC m=+1019.561366297" watchObservedRunningTime="2025-10-08 07:33:54.199645766 +0000 UTC m=+1019.570610701" Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.225506 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7fd7c44d-xwrpr" podStartSLOduration=3.225485918 podStartE2EDuration="3.225485918s" podCreationTimestamp="2025-10-08 07:33:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:33:54.209243969 +0000 UTC m=+1019.580208904" watchObservedRunningTime="2025-10-08 07:33:54.225485918 +0000 UTC m=+1019.596450853" Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.237202 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" podStartSLOduration=3.237010692 podStartE2EDuration="3.237010692s" podCreationTimestamp="2025-10-08 07:33:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:33:54.231360193 +0000 UTC m=+1019.602325148" watchObservedRunningTime="2025-10-08 07:33:54.237010692 +0000 UTC m=+1019.607975627" Oct 08 07:33:54 crc kubenswrapper[4693]: I1008 07:33:54.249615 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-m4xlg" podStartSLOduration=3.6987958020000002 podStartE2EDuration="37.249598944s" podCreationTimestamp="2025-10-08 07:33:17 +0000 UTC" firstStartedPulling="2025-10-08 07:33:19.337762424 +0000 UTC m=+984.708727359" lastFinishedPulling="2025-10-08 07:33:52.888565566 +0000 UTC m=+1018.259530501" observedRunningTime="2025-10-08 07:33:54.247978031 +0000 UTC m=+1019.618942966" watchObservedRunningTime="2025-10-08 07:33:54.249598944 +0000 UTC m=+1019.620563879" Oct 08 07:33:55 crc kubenswrapper[4693]: I1008 07:33:55.172003 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-77bfdd5769-m42ll"] Oct 08 07:33:55 crc kubenswrapper[4693]: I1008 07:33:55.215638 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"210e5669-78d1-47cb-8e46-e00c9764c2c7","Type":"ContainerStarted","Data":"8b0653ccc1f9f1774b8e9b228531dd207898af52527e81451c392d21cdf86a27"} Oct 08 07:33:55 crc kubenswrapper[4693]: I1008 07:33:55.232040 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-77bfdd5769-m42ll" event={"ID":"e8c282e4-9865-41ec-922f-86d322b60ea0","Type":"ContainerStarted","Data":"807f4d48dcaed2ce361006f31b08109ddd0073a6b13f171ba8cca10a56f957e5"} Oct 08 07:33:55 crc kubenswrapper[4693]: I1008 07:33:55.243158 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"de2f6c37-8e32-4276-bbe3-9f5c404a18b3","Type":"ContainerStarted","Data":"53d8c21458e02a80b867ac8e0e974d9aa93fa90b186a36379f7f5e985e9ad685"} Oct 08 07:33:55 crc kubenswrapper[4693]: I1008 07:33:55.249804 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=16.249787304 podStartE2EDuration="16.249787304s" podCreationTimestamp="2025-10-08 07:33:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 
07:33:55.235967669 +0000 UTC m=+1020.606932604" watchObservedRunningTime="2025-10-08 07:33:55.249787304 +0000 UTC m=+1020.620752239"
Oct 08 07:33:56 crc kubenswrapper[4693]: I1008 07:33:56.251934 4693 generic.go:334] "Generic (PLEG): container finished" podID="04536603-9f35-460a-b169-a462d38283b9" containerID="db6f7cd08db4690295e1fa61dc78bec7b1189a82f31b277a7de282b99972fe63" exitCode=0
Oct 08 07:33:56 crc kubenswrapper[4693]: I1008 07:33:56.252039 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7xn4v" event={"ID":"04536603-9f35-460a-b169-a462d38283b9","Type":"ContainerDied","Data":"db6f7cd08db4690295e1fa61dc78bec7b1189a82f31b277a7de282b99972fe63"}
Oct 08 07:33:56 crc kubenswrapper[4693]: I1008 07:33:56.255589 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-77bfdd5769-m42ll" event={"ID":"e8c282e4-9865-41ec-922f-86d322b60ea0","Type":"ContainerStarted","Data":"a80f9967f72e57ad4bbc46a2b9402a486efc6a78a28f8e94d9599937d8cf790f"}
Oct 08 07:33:56 crc kubenswrapper[4693]: I1008 07:33:56.255629 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-77bfdd5769-m42ll" event={"ID":"e8c282e4-9865-41ec-922f-86d322b60ea0","Type":"ContainerStarted","Data":"14e181d3c7395449b2612a469c41480eab41c0029f16aad038b80c9433474fcd"}
Oct 08 07:33:56 crc kubenswrapper[4693]: I1008 07:33:56.255879 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-77bfdd5769-m42ll"
Oct 08 07:33:56 crc kubenswrapper[4693]: I1008 07:33:56.261560 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"de2f6c37-8e32-4276-bbe3-9f5c404a18b3","Type":"ContainerStarted","Data":"faf4a2d70b7180f1e2b575fad7f563261e2b2f73d26cf1007620670089098961"}
Oct 08 07:33:56 crc kubenswrapper[4693]: I1008 07:33:56.302205 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-77bfdd5769-m42ll" podStartSLOduration=3.302182809 podStartE2EDuration="3.302182809s" podCreationTimestamp="2025-10-08 07:33:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:33:56.299263772 +0000 UTC m=+1021.670228717" watchObservedRunningTime="2025-10-08 07:33:56.302182809 +0000 UTC m=+1021.673147744"
Oct 08 07:33:56 crc kubenswrapper[4693]: I1008 07:33:56.318096 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.318077619 podStartE2EDuration="6.318077619s" podCreationTimestamp="2025-10-08 07:33:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:33:56.315127561 +0000 UTC m=+1021.686092496" watchObservedRunningTime="2025-10-08 07:33:56.318077619 +0000 UTC m=+1021.689042554"
Oct 08 07:33:57 crc kubenswrapper[4693]: I1008 07:33:57.279188 4693 generic.go:334] "Generic (PLEG): container finished" podID="b4d301e9-d078-4876-a6a2-52a7c3b4dcbe" containerID="fbfe4123cc4ff84eda46dc37b6caca960cdd19adbd16833385533ac145451e09" exitCode=0
Oct 08 07:33:57 crc kubenswrapper[4693]: I1008 07:33:57.279411 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-m4xlg" event={"ID":"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe","Type":"ContainerDied","Data":"fbfe4123cc4ff84eda46dc37b6caca960cdd19adbd16833385533ac145451e09"}
Oct 08 07:33:59 crc kubenswrapper[4693]: I1008 07:33:59.300276 4693 generic.go:334] "Generic (PLEG): container finished" podID="79884c8c-f689-46b7-9223-66bd0b7bff8e" containerID="dc4f68dded1f91f34c7f0e69513d47d93be66115785ca34b370b996156a1dcda" exitCode=0
Oct 08 07:33:59 crc kubenswrapper[4693]: I1008 07:33:59.300317 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-rxtmf" event={"ID":"79884c8c-f689-46b7-9223-66bd0b7bff8e","Type":"ContainerDied","Data":"dc4f68dded1f91f34c7f0e69513d47d93be66115785ca34b370b996156a1dcda"}
Oct 08 07:33:59 crc kubenswrapper[4693]: I1008 07:33:59.648061 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 08 07:33:59 crc kubenswrapper[4693]: I1008 07:33:59.648118 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 08 07:33:59 crc kubenswrapper[4693]: I1008 07:33:59.691159 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 08 07:33:59 crc kubenswrapper[4693]: I1008 07:33:59.706844 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 08 07:33:59 crc kubenswrapper[4693]: I1008 07:33:59.967493 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-m4xlg"
Oct 08 07:33:59 crc kubenswrapper[4693]: I1008 07:33:59.971699 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7xn4v"
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.028422 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-config-data\") pod \"04536603-9f35-460a-b169-a462d38283b9\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.028475 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-scripts\") pod \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.028523 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-fernet-keys\") pod \"04536603-9f35-460a-b169-a462d38283b9\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.028561 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-combined-ca-bundle\") pod \"04536603-9f35-460a-b169-a462d38283b9\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.028612 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hr6rs\" (UniqueName: \"kubernetes.io/projected/04536603-9f35-460a-b169-a462d38283b9-kube-api-access-hr6rs\") pod \"04536603-9f35-460a-b169-a462d38283b9\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.028675 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-combined-ca-bundle\") pod \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.028735 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-credential-keys\") pod \"04536603-9f35-460a-b169-a462d38283b9\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.028766 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgg9b\" (UniqueName: \"kubernetes.io/projected/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-kube-api-access-vgg9b\") pod \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.028830 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-config-data\") pod \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.028913 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-logs\") pod \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\" (UID: \"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.028967 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-scripts\") pod \"04536603-9f35-460a-b169-a462d38283b9\" (UID: \"04536603-9f35-460a-b169-a462d38283b9\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.031262 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-logs" (OuterVolumeSpecName: "logs") pod "b4d301e9-d078-4876-a6a2-52a7c3b4dcbe" (UID: "b4d301e9-d078-4876-a6a2-52a7c3b4dcbe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.046578 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "04536603-9f35-460a-b169-a462d38283b9" (UID: "04536603-9f35-460a-b169-a462d38283b9"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.047003 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-scripts" (OuterVolumeSpecName: "scripts") pod "04536603-9f35-460a-b169-a462d38283b9" (UID: "04536603-9f35-460a-b169-a462d38283b9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.055052 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-scripts" (OuterVolumeSpecName: "scripts") pod "b4d301e9-d078-4876-a6a2-52a7c3b4dcbe" (UID: "b4d301e9-d078-4876-a6a2-52a7c3b4dcbe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.061043 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-kube-api-access-vgg9b" (OuterVolumeSpecName: "kube-api-access-vgg9b") pod "b4d301e9-d078-4876-a6a2-52a7c3b4dcbe" (UID: "b4d301e9-d078-4876-a6a2-52a7c3b4dcbe"). InnerVolumeSpecName "kube-api-access-vgg9b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.065256 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "04536603-9f35-460a-b169-a462d38283b9" (UID: "04536603-9f35-460a-b169-a462d38283b9"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.073124 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04536603-9f35-460a-b169-a462d38283b9-kube-api-access-hr6rs" (OuterVolumeSpecName: "kube-api-access-hr6rs") pod "04536603-9f35-460a-b169-a462d38283b9" (UID: "04536603-9f35-460a-b169-a462d38283b9"). InnerVolumeSpecName "kube-api-access-hr6rs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.092138 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4d301e9-d078-4876-a6a2-52a7c3b4dcbe" (UID: "b4d301e9-d078-4876-a6a2-52a7c3b4dcbe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.101885 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-config-data" (OuterVolumeSpecName: "config-data") pod "b4d301e9-d078-4876-a6a2-52a7c3b4dcbe" (UID: "b4d301e9-d078-4876-a6a2-52a7c3b4dcbe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.104710 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-config-data" (OuterVolumeSpecName: "config-data") pod "04536603-9f35-460a-b169-a462d38283b9" (UID: "04536603-9f35-460a-b169-a462d38283b9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.131484 4693 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-credential-keys\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.131745 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgg9b\" (UniqueName: \"kubernetes.io/projected/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-kube-api-access-vgg9b\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.131848 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-config-data\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.131929 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-logs\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.131989 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-scripts\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.132047 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-config-data\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.132099 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-scripts\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.132152 4693 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-fernet-keys\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.132204 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hr6rs\" (UniqueName: \"kubernetes.io/projected/04536603-9f35-460a-b169-a462d38283b9-kube-api-access-hr6rs\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.132257 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.134884 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04536603-9f35-460a-b169-a462d38283b9" (UID: "04536603-9f35-460a-b169-a462d38283b9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.233650 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04536603-9f35-460a-b169-a462d38283b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.326799 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7xn4v" event={"ID":"04536603-9f35-460a-b169-a462d38283b9","Type":"ContainerDied","Data":"230ffea816a224f28365615325641173a01746bc3a19f505895d358c648b6532"}
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.326848 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="230ffea816a224f28365615325641173a01746bc3a19f505895d358c648b6532"
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.326860 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7xn4v"
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.335613 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-m4xlg" event={"ID":"b4d301e9-d078-4876-a6a2-52a7c3b4dcbe","Type":"ContainerDied","Data":"a26f89784069255b23e6d92b51296afb3716749d97d4e8e04150d1d52ec765cd"}
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.335662 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a26f89784069255b23e6d92b51296afb3716749d97d4e8e04150d1d52ec765cd"
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.336039 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-m4xlg"
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.336447 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.336475 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.463175 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.464008 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.510282 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.532261 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.710422 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-rxtmf"
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.747624 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/79884c8c-f689-46b7-9223-66bd0b7bff8e-db-sync-config-data\") pod \"79884c8c-f689-46b7-9223-66bd0b7bff8e\" (UID: \"79884c8c-f689-46b7-9223-66bd0b7bff8e\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.747926 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcpj6\" (UniqueName: \"kubernetes.io/projected/79884c8c-f689-46b7-9223-66bd0b7bff8e-kube-api-access-xcpj6\") pod \"79884c8c-f689-46b7-9223-66bd0b7bff8e\" (UID: \"79884c8c-f689-46b7-9223-66bd0b7bff8e\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.748080 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79884c8c-f689-46b7-9223-66bd0b7bff8e-combined-ca-bundle\") pod \"79884c8c-f689-46b7-9223-66bd0b7bff8e\" (UID: \"79884c8c-f689-46b7-9223-66bd0b7bff8e\") "
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.752204 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79884c8c-f689-46b7-9223-66bd0b7bff8e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "79884c8c-f689-46b7-9223-66bd0b7bff8e" (UID: "79884c8c-f689-46b7-9223-66bd0b7bff8e"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.757462 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79884c8c-f689-46b7-9223-66bd0b7bff8e-kube-api-access-xcpj6" (OuterVolumeSpecName: "kube-api-access-xcpj6") pod "79884c8c-f689-46b7-9223-66bd0b7bff8e" (UID: "79884c8c-f689-46b7-9223-66bd0b7bff8e"). InnerVolumeSpecName "kube-api-access-xcpj6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.777901 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79884c8c-f689-46b7-9223-66bd0b7bff8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79884c8c-f689-46b7-9223-66bd0b7bff8e" (UID: "79884c8c-f689-46b7-9223-66bd0b7bff8e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.849737 4693 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/79884c8c-f689-46b7-9223-66bd0b7bff8e-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.849770 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcpj6\" (UniqueName: \"kubernetes.io/projected/79884c8c-f689-46b7-9223-66bd0b7bff8e-kube-api-access-xcpj6\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.849781 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79884c8c-f689-46b7-9223-66bd0b7bff8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:00 crc kubenswrapper[4693]: I1008 07:34:00.926430 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-748d7c6795-mlmk2"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.114567 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5bbc7cbf94-5tkqs"]
Oct 08 07:34:01 crc kubenswrapper[4693]: E1008 07:34:01.114931 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4d301e9-d078-4876-a6a2-52a7c3b4dcbe" containerName="placement-db-sync"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.114948 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4d301e9-d078-4876-a6a2-52a7c3b4dcbe" containerName="placement-db-sync"
Oct 08 07:34:01 crc kubenswrapper[4693]: E1008 07:34:01.114963 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04536603-9f35-460a-b169-a462d38283b9" containerName="keystone-bootstrap"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.114968 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="04536603-9f35-460a-b169-a462d38283b9" containerName="keystone-bootstrap"
Oct 08 07:34:01 crc kubenswrapper[4693]: E1008 07:34:01.114979 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79884c8c-f689-46b7-9223-66bd0b7bff8e" containerName="barbican-db-sync"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.114985 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="79884c8c-f689-46b7-9223-66bd0b7bff8e" containerName="barbican-db-sync"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.115146 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="79884c8c-f689-46b7-9223-66bd0b7bff8e" containerName="barbican-db-sync"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.115159 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4d301e9-d078-4876-a6a2-52a7c3b4dcbe" containerName="placement-db-sync"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.115168 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="04536603-9f35-460a-b169-a462d38283b9" containerName="keystone-bootstrap"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.115698 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.118136 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.118296 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.118487 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.118777 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.118928 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.119126 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-nd6nt"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.132858 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-84d7c4f8cb-75jz5"]
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.134945 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.140734 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.141146 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.142188 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.146885 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.147018 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-s2pl4"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.154036 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-credential-keys\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.154073 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-config-data\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.154094 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-public-tls-certs\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.154170 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-scripts\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.154198 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-internal-tls-certs\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.154227 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwzd4\" (UniqueName: \"kubernetes.io/projected/e4dc4d60-5d83-4f09-986c-a394c44788b5-kube-api-access-pwzd4\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.154258 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-combined-ca-bundle\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.154287 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-fernet-keys\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.163424 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-84d7c4f8cb-75jz5"]
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.198558 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5bbc7cbf94-5tkqs"]
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.255780 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-credential-keys\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.255829 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-config-data\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.255848 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-public-tls-certs\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.255879 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-public-tls-certs\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.256458 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7285e65f-f435-4b74-8019-c5acad9b74c7-logs\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.256510 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-scripts\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.256541 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-config-data\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.256561 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-internal-tls-certs\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.256578 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-internal-tls-certs\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.256601 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-combined-ca-bundle\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.256627 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flvdc\" (UniqueName: \"kubernetes.io/projected/7285e65f-f435-4b74-8019-c5acad9b74c7-kube-api-access-flvdc\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.256650 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwzd4\" (UniqueName: \"kubernetes.io/projected/e4dc4d60-5d83-4f09-986c-a394c44788b5-kube-api-access-pwzd4\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.256679 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-scripts\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.256694 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-combined-ca-bundle\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.256717 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-fernet-keys\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.263235 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-config-data\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.263537 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-fernet-keys\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.269331 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-combined-ca-bundle\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.274550 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-internal-tls-certs\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.277367 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwzd4\" (UniqueName: \"kubernetes.io/projected/e4dc4d60-5d83-4f09-986c-a394c44788b5-kube-api-access-pwzd4\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.278932 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-credential-keys\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.288406 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-public-tls-certs\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.288484 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4dc4d60-5d83-4f09-986c-a394c44788b5-scripts\") pod \"keystone-5bbc7cbf94-5tkqs\" (UID: \"e4dc4d60-5d83-4f09-986c-a394c44788b5\") " pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.354791 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-rxtmf" event={"ID":"79884c8c-f689-46b7-9223-66bd0b7bff8e","Type":"ContainerDied","Data":"a41bd54365902d5084b4ec6202695bd061f8a1b9a88cea50c0b45030a550c94a"}
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.354856 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a41bd54365902d5084b4ec6202695bd061f8a1b9a88cea50c0b45030a550c94a"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.354926 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-rxtmf"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.357936 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"019964d2-ae2a-4501-ad53-c6c0b2260a32","Type":"ContainerStarted","Data":"8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6"}
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.358654 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.358682 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.359047 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flvdc\" (UniqueName: \"kubernetes.io/projected/7285e65f-f435-4b74-8019-c5acad9b74c7-kube-api-access-flvdc\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.359098 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-scripts\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.359169 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-public-tls-certs\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.359192 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7285e65f-f435-4b74-8019-c5acad9b74c7-logs\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.359238 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-config-data\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.359256 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-internal-tls-certs\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.359281 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-combined-ca-bundle\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.376479 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-internal-tls-certs\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.376709 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7285e65f-f435-4b74-8019-c5acad9b74c7-logs\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.377520 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-combined-ca-bundle\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.378681 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-config-data\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.382472 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-public-tls-certs\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.395858 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7285e65f-f435-4b74-8019-c5acad9b74c7-scripts\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.405869 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flvdc\" (UniqueName: \"kubernetes.io/projected/7285e65f-f435-4b74-8019-c5acad9b74c7-kube-api-access-flvdc\") pod \"placement-84d7c4f8cb-75jz5\" (UID: \"7285e65f-f435-4b74-8019-c5acad9b74c7\") " pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.444594 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5bbc7cbf94-5tkqs"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.454959 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-84d7c4f8cb-75jz5"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.536235 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7d588c7bd-f7dbq"]
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.538386 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.545310 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.545503 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-bq7dn"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.545615 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.588372 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-69584f958c-5wpzz"]
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.589985 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.592425 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.615641 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.635894 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-69584f958c-5wpzz"]
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.643513 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7d588c7bd-f7dbq"]
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.651959 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-k8rpf"]
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.673428 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twklz\" (UniqueName: \"kubernetes.io/projected/f3bebe35-d072-4368-ba55-d8415a4f44ef-kube-api-access-twklz\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.673532 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3bebe35-d072-4368-ba55-d8415a4f44ef-config-data-custom\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.673556 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3bebe35-d072-4368-ba55-d8415a4f44ef-config-data\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.673575 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3bebe35-d072-4368-ba55-d8415a4f44ef-logs\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.673611 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3bebe35-d072-4368-ba55-d8415a4f44ef-combined-ca-bundle\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.719390 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-647ccf6b96-zrz9s"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.719930 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-hg885"]
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.721804 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-647ccf6b96-zrz9s"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.721957 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.754161 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-hg885"]
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.771878 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-785c8f8986-64hh9"]
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.774381 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.777002 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f74783b1-e062-4c4f-82eb-a7df2387913d-config-data-custom\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.777045 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twklz\" (UniqueName: \"kubernetes.io/projected/f3bebe35-d072-4368-ba55-d8415a4f44ef-kube-api-access-twklz\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.777101 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f74783b1-e062-4c4f-82eb-a7df2387913d-logs\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.777172 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3bebe35-d072-4368-ba55-d8415a4f44ef-config-data-custom\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.777189 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlhg4\" (UniqueName: \"kubernetes.io/projected/f74783b1-e062-4c4f-82eb-a7df2387913d-kube-api-access-hlhg4\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.777208 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3bebe35-d072-4368-ba55-d8415a4f44ef-config-data\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.777225 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3bebe35-d072-4368-ba55-d8415a4f44ef-logs\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.777251 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f74783b1-e062-4c4f-82eb-a7df2387913d-combined-ca-bundle\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.777298 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3bebe35-d072-4368-ba55-d8415a4f44ef-combined-ca-bundle\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.777318 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f74783b1-e062-4c4f-82eb-a7df2387913d-config-data\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.779062 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.779993 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3bebe35-d072-4368-ba55-d8415a4f44ef-logs\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.781335 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-785c8f8986-64hh9"]
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.787394 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3bebe35-d072-4368-ba55-d8415a4f44ef-combined-ca-bundle\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.790443 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3bebe35-d072-4368-ba55-d8415a4f44ef-config-data-custom\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.792962 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3bebe35-d072-4368-ba55-d8415a4f44ef-config-data\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.800645 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twklz\" (UniqueName: \"kubernetes.io/projected/f3bebe35-d072-4368-ba55-d8415a4f44ef-kube-api-access-twklz\") pod \"barbican-keystone-listener-7d588c7bd-f7dbq\" (UID: \"f3bebe35-d072-4368-ba55-d8415a4f44ef\") " pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.803642 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-786b4cdb4-z6p8n"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.804721 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-786b4cdb4-z6p8n"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.878184 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.882948 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f74783b1-e062-4c4f-82eb-a7df2387913d-combined-ca-bundle\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883005 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883034 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f74783b1-e062-4c4f-82eb-a7df2387913d-config-data\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883076 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-config-data\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883117 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-combined-ca-bundle\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883136 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f74783b1-e062-4c4f-82eb-a7df2387913d-config-data-custom\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883155 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r7t2\" (UniqueName: \"kubernetes.io/projected/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-kube-api-access-6r7t2\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883192 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-dns-svc\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883214 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883245 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f74783b1-e062-4c4f-82eb-a7df2387913d-logs\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883278 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhzfl\" (UniqueName: \"kubernetes.io/projected/6246bdc3-ea30-4c42-ba72-bcee44aa057f-kube-api-access-bhzfl\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883320 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-config\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883333 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883362 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-logs\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883419 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-config-data-custom\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.883447 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlhg4\" (UniqueName: \"kubernetes.io/projected/f74783b1-e062-4c4f-82eb-a7df2387913d-kube-api-access-hlhg4\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.884700 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f74783b1-e062-4c4f-82eb-a7df2387913d-logs\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.889095 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f74783b1-e062-4c4f-82eb-a7df2387913d-combined-ca-bundle\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.889343 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f74783b1-e062-4c4f-82eb-a7df2387913d-config-data\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.889752 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f74783b1-e062-4c4f-82eb-a7df2387913d-config-data-custom\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.913435 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlhg4\" (UniqueName: \"kubernetes.io/projected/f74783b1-e062-4c4f-82eb-a7df2387913d-kube-api-access-hlhg4\") pod \"barbican-worker-69584f958c-5wpzz\" (UID: \"f74783b1-e062-4c4f-82eb-a7df2387913d\") " pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.923208 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-69584f958c-5wpzz"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.984881 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-combined-ca-bundle\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.984925 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r7t2\" (UniqueName: \"kubernetes.io/projected/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-kube-api-access-6r7t2\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.984981 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-dns-svc\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.985016 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.985045 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhzfl\" (UniqueName: \"kubernetes.io/projected/6246bdc3-ea30-4c42-ba72-bcee44aa057f-kube-api-access-bhzfl\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.985078 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-config\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.985094 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.985109 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-logs\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.985137 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-config-data-custom\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.985190 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.985228 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-config-data\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.987438 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.988146 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.988167 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-dns-svc\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885"
Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.990455 4693
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-logs\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9" Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.990499 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-config-data\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9" Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.994888 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-config\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885" Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.997282 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-config-data-custom\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9" Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.997584 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885" Oct 08 07:34:01 crc kubenswrapper[4693]: I1008 07:34:01.997875 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-combined-ca-bundle\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9" Oct 08 07:34:02 crc kubenswrapper[4693]: I1008 07:34:02.008408 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhzfl\" (UniqueName: \"kubernetes.io/projected/6246bdc3-ea30-4c42-ba72-bcee44aa057f-kube-api-access-bhzfl\") pod \"dnsmasq-dns-85ff748b95-hg885\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " pod="openstack/dnsmasq-dns-85ff748b95-hg885" Oct 08 07:34:02 crc kubenswrapper[4693]: I1008 07:34:02.015612 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r7t2\" (UniqueName: \"kubernetes.io/projected/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-kube-api-access-6r7t2\") pod \"barbican-api-785c8f8986-64hh9\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") " pod="openstack/barbican-api-785c8f8986-64hh9" Oct 08 07:34:02 crc kubenswrapper[4693]: I1008 07:34:02.055358 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-hg885" Oct 08 07:34:02 crc kubenswrapper[4693]: I1008 07:34:02.113637 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-785c8f8986-64hh9" Oct 08 07:34:02 crc kubenswrapper[4693]: I1008 07:34:02.168141 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5bbc7cbf94-5tkqs"] Oct 08 07:34:02 crc kubenswrapper[4693]: I1008 07:34:02.369856 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5bbc7cbf94-5tkqs" event={"ID":"e4dc4d60-5d83-4f09-986c-a394c44788b5","Type":"ContainerStarted","Data":"66d6ec571c4beda001a4df14830241532c0cb5240b693817b8b875126f5230b3"} Oct 08 07:34:02 crc kubenswrapper[4693]: I1008 07:34:02.369884 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" podUID="9ec230f9-e0bd-4760-aeef-08d63ef6b795" containerName="dnsmasq-dns" containerID="cri-o://23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414" gracePeriod=10 Oct 08 07:34:02 crc kubenswrapper[4693]: I1008 07:34:02.381373 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-84d7c4f8cb-75jz5"] Oct 08 07:34:02 crc kubenswrapper[4693]: I1008 07:34:02.567648 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-69584f958c-5wpzz"] Oct 08 07:34:02 crc kubenswrapper[4693]: I1008 07:34:02.586824 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7d588c7bd-f7dbq"] Oct 08 07:34:02 crc kubenswrapper[4693]: I1008 07:34:02.830861 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-785c8f8986-64hh9"] Oct 08 07:34:02 crc kubenswrapper[4693]: I1008 07:34:02.996617 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-hg885"] Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.141520 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.141788 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.198017 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.318467 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-dns-svc\") pod \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.318534 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-ovsdbserver-nb\") pod \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.318651 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-274cl\" (UniqueName: \"kubernetes.io/projected/9ec230f9-e0bd-4760-aeef-08d63ef6b795-kube-api-access-274cl\") pod \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.318704 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-dns-swift-storage-0\") pod \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.318755 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-config\") pod \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.320316 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-ovsdbserver-sb\") pod \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\" (UID: \"9ec230f9-e0bd-4760-aeef-08d63ef6b795\") " Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.345332 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ec230f9-e0bd-4760-aeef-08d63ef6b795-kube-api-access-274cl" (OuterVolumeSpecName: "kube-api-access-274cl") pod "9ec230f9-e0bd-4760-aeef-08d63ef6b795" (UID: "9ec230f9-e0bd-4760-aeef-08d63ef6b795"). InnerVolumeSpecName "kube-api-access-274cl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.404738 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq" event={"ID":"f3bebe35-d072-4368-ba55-d8415a4f44ef","Type":"ContainerStarted","Data":"b4bc0379439e6bb11d2dbc1db673d5b1c690fba6224c4c4a0569f660bd44d674"} Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.414990 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5bbc7cbf94-5tkqs" event={"ID":"e4dc4d60-5d83-4f09-986c-a394c44788b5","Type":"ContainerStarted","Data":"d4eb9b2f8e629a39d9131c7ac23d984f4b33ec55bc2cfc71710ea541505bad69"} Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.416060 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-5bbc7cbf94-5tkqs" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.425019 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-274cl\" (UniqueName: \"kubernetes.io/projected/9ec230f9-e0bd-4760-aeef-08d63ef6b795-kube-api-access-274cl\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.429949 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-hg885" event={"ID":"6246bdc3-ea30-4c42-ba72-bcee44aa057f","Type":"ContainerStarted","Data":"a0adaf76b5b183bc1f61c95609b80b1bd96fa0f00a223ce74ab9b55694f0ee8e"} Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.439023 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-69584f958c-5wpzz" event={"ID":"f74783b1-e062-4c4f-82eb-a7df2387913d","Type":"ContainerStarted","Data":"b64f48fdb1a145f47ea115fd22fe5b685f505323e0549d47afc891a77e55db44"} Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.441706 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5bbc7cbf94-5tkqs" podStartSLOduration=2.441683532 podStartE2EDuration="2.441683532s" podCreationTimestamp="2025-10-08 07:34:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:34:03.436050963 +0000 UTC m=+1028.807015898" watchObservedRunningTime="2025-10-08 07:34:03.441683532 +0000 UTC m=+1028.812648467" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.472410 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-config" (OuterVolumeSpecName: "config") pod "9ec230f9-e0bd-4760-aeef-08d63ef6b795" (UID: "9ec230f9-e0bd-4760-aeef-08d63ef6b795"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.482498 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-84d7c4f8cb-75jz5" event={"ID":"7285e65f-f435-4b74-8019-c5acad9b74c7","Type":"ContainerStarted","Data":"a9b085af8b51b349c290fe035b353c814875089ecf5afecb00a4f845776e1edd"} Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.482561 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-84d7c4f8cb-75jz5" event={"ID":"7285e65f-f435-4b74-8019-c5acad9b74c7","Type":"ContainerStarted","Data":"902b1760d7adeff507314d4b46b16ab5bc80f71520109dfd3b1a3306be47adf6"} Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.482576 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-84d7c4f8cb-75jz5" event={"ID":"7285e65f-f435-4b74-8019-c5acad9b74c7","Type":"ContainerStarted","Data":"ba0a09419624a3cc294c2cd6f87baa28c3cd6146205385a1192b064fcd8c4cd6"} Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.483602 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-84d7c4f8cb-75jz5" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.483627 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-84d7c4f8cb-75jz5" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.501000 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-785c8f8986-64hh9" event={"ID":"b9f4f1ac-3f4d-470e-95ee-567268c5ad43","Type":"ContainerStarted","Data":"a41fdc74774d8175b31274e2e0ca63f06684adf355b53e4c3d3d2ac63d7fbdd0"} Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.501046 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-785c8f8986-64hh9" event={"ID":"b9f4f1ac-3f4d-470e-95ee-567268c5ad43","Type":"ContainerStarted","Data":"5c875d89fbcaee4d31046a68e3866fb8aa8920d18bb62036646675f00b785613"} Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.514293 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9ec230f9-e0bd-4760-aeef-08d63ef6b795" (UID: "9ec230f9-e0bd-4760-aeef-08d63ef6b795"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.523154 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9ec230f9-e0bd-4760-aeef-08d63ef6b795" (UID: "9ec230f9-e0bd-4760-aeef-08d63ef6b795"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.528475 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.528520 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.528531 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.531032 4693 generic.go:334] "Generic (PLEG): container finished" podID="9ec230f9-e0bd-4760-aeef-08d63ef6b795" containerID="23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414" exitCode=0 Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.531132 4693 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.531136 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.531150 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" event={"ID":"9ec230f9-e0bd-4760-aeef-08d63ef6b795","Type":"ContainerDied","Data":"23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414"} Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.531174 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-k8rpf" event={"ID":"9ec230f9-e0bd-4760-aeef-08d63ef6b795","Type":"ContainerDied","Data":"5bfe6ac4dcc612359f9b1efc021941edd205ed9e16410e97e5cb63e527810a37"} Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.531191 4693 scope.go:117] "RemoveContainer" containerID="23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.531140 4693 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.537231 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9ec230f9-e0bd-4760-aeef-08d63ef6b795" (UID: "9ec230f9-e0bd-4760-aeef-08d63ef6b795"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.570519 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9ec230f9-e0bd-4760-aeef-08d63ef6b795" (UID: "9ec230f9-e0bd-4760-aeef-08d63ef6b795"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.613229 4693 scope.go:117] "RemoveContainer" containerID="4719a002be72b0ac919a4629317471dd385acad5e34152d64947d9652de199a6" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.629806 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.629845 4693 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9ec230f9-e0bd-4760-aeef-08d63ef6b795-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.670732 4693 scope.go:117] "RemoveContainer" containerID="23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414" Oct 08 07:34:03 crc kubenswrapper[4693]: E1008 07:34:03.671074 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414\": container with ID starting with 23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414 not found: ID does not exist" containerID="23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.671111 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414"} err="failed to get container status \"23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414\": rpc error: code = NotFound desc = could not find container \"23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414\": container with ID starting with 23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414 not found: ID does not exist" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.671132 4693 scope.go:117] "RemoveContainer" containerID="4719a002be72b0ac919a4629317471dd385acad5e34152d64947d9652de199a6" Oct 08 07:34:03 crc kubenswrapper[4693]: E1008 07:34:03.671376 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4719a002be72b0ac919a4629317471dd385acad5e34152d64947d9652de199a6\": container with ID starting with 4719a002be72b0ac919a4629317471dd385acad5e34152d64947d9652de199a6 not found: ID does not exist" containerID="4719a002be72b0ac919a4629317471dd385acad5e34152d64947d9652de199a6" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.671398 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4719a002be72b0ac919a4629317471dd385acad5e34152d64947d9652de199a6"} err="failed to get container status \"4719a002be72b0ac919a4629317471dd385acad5e34152d64947d9652de199a6\": rpc error: code = NotFound desc = could not find container \"4719a002be72b0ac919a4629317471dd385acad5e34152d64947d9652de199a6\": container with ID starting with 4719a002be72b0ac919a4629317471dd385acad5e34152d64947d9652de199a6 not found: ID does not exist" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.857008 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-84d7c4f8cb-75jz5" podStartSLOduration=2.856980405 podStartE2EDuration="2.856980405s" podCreationTimestamp="2025-10-08 07:34:01 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:34:03.514739919 +0000 UTC m=+1028.885704854" watchObservedRunningTime="2025-10-08 07:34:03.856980405 +0000 UTC m=+1029.227945340" Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.867840 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-k8rpf"] Oct 08 07:34:03 crc kubenswrapper[4693]: I1008 07:34:03.881714 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-k8rpf"] Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.502462 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.507369 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.551202 4693 generic.go:334] "Generic (PLEG): container finished" podID="6246bdc3-ea30-4c42-ba72-bcee44aa057f" containerID="accbba6b3446310167a262dbe1155ece68e8a14b9fb1586632e34bd0c1d0e5d8" exitCode=0 Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.552727 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-hg885" event={"ID":"6246bdc3-ea30-4c42-ba72-bcee44aa057f","Type":"ContainerDied","Data":"accbba6b3446310167a262dbe1155ece68e8a14b9fb1586632e34bd0c1d0e5d8"} Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.587113 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-785c8f8986-64hh9" event={"ID":"b9f4f1ac-3f4d-470e-95ee-567268c5ad43","Type":"ContainerStarted","Data":"4de8c1d29643d97d308a75c26e8a1e4aadfe7216cac9eda538604c4010815ab5"} Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.588131 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-785c8f8986-64hh9" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.588159 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-785c8f8986-64hh9" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.613503 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-785c8f8986-64hh9" podStartSLOduration=3.613485378 podStartE2EDuration="3.613485378s" podCreationTimestamp="2025-10-08 07:34:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:34:04.606210636 +0000 UTC m=+1029.977175571" watchObservedRunningTime="2025-10-08 07:34:04.613485378 +0000 UTC m=+1029.984450313" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.735324 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-78f9f7b5dd-wdpgb"] Oct 08 07:34:04 crc kubenswrapper[4693]: E1008 07:34:04.735701 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ec230f9-e0bd-4760-aeef-08d63ef6b795" containerName="dnsmasq-dns" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.735717 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ec230f9-e0bd-4760-aeef-08d63ef6b795" containerName="dnsmasq-dns" Oct 08 07:34:04 crc kubenswrapper[4693]: E1008 07:34:04.735749 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ec230f9-e0bd-4760-aeef-08d63ef6b795" containerName="init" Oct 08 07:34:04 crc 
kubenswrapper[4693]: I1008 07:34:04.735755 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ec230f9-e0bd-4760-aeef-08d63ef6b795" containerName="init" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.735935 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ec230f9-e0bd-4760-aeef-08d63ef6b795" containerName="dnsmasq-dns" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.736789 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.739227 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.739337 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.746048 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-78f9f7b5dd-wdpgb"] Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.856735 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bae6b42f-5a3c-4568-b8db-84be1514827e-logs\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.857109 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-config-data\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.857188 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-internal-tls-certs\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.857251 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-combined-ca-bundle\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.857307 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-public-tls-certs\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.857349 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-config-data-custom\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 
07:34:04.857395 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ld5c\" (UniqueName: \"kubernetes.io/projected/bae6b42f-5a3c-4568-b8db-84be1514827e-kube-api-access-7ld5c\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.959298 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-combined-ca-bundle\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.959361 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-public-tls-certs\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.959393 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-config-data-custom\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.959427 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ld5c\" (UniqueName: \"kubernetes.io/projected/bae6b42f-5a3c-4568-b8db-84be1514827e-kube-api-access-7ld5c\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.959457 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bae6b42f-5a3c-4568-b8db-84be1514827e-logs\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.959484 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-config-data\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.959573 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-internal-tls-certs\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.960158 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bae6b42f-5a3c-4568-b8db-84be1514827e-logs\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.968207 4693 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-config-data-custom\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.971282 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-internal-tls-certs\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.974051 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-config-data\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.975786 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-public-tls-certs\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.976295 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bae6b42f-5a3c-4568-b8db-84be1514827e-combined-ca-bundle\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:04 crc kubenswrapper[4693]: I1008 07:34:04.984388 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ld5c\" (UniqueName: \"kubernetes.io/projected/bae6b42f-5a3c-4568-b8db-84be1514827e-kube-api-access-7ld5c\") pod \"barbican-api-78f9f7b5dd-wdpgb\" (UID: \"bae6b42f-5a3c-4568-b8db-84be1514827e\") " pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:05 crc kubenswrapper[4693]: I1008 07:34:05.053058 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:05 crc kubenswrapper[4693]: I1008 07:34:05.373348 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ec230f9-e0bd-4760-aeef-08d63ef6b795" path="/var/lib/kubelet/pods/9ec230f9-e0bd-4760-aeef-08d63ef6b795/volumes" Oct 08 07:34:05 crc kubenswrapper[4693]: I1008 07:34:05.610868 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-r7p2j" event={"ID":"e1bdcd99-a53d-45ee-b439-57c0e0025fb9","Type":"ContainerStarted","Data":"e6d6086133e430d57e92411d11b7b846138ac1be139229c339e52287578db8e0"} Oct 08 07:34:05 crc kubenswrapper[4693]: I1008 07:34:05.631431 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-r7p2j" podStartSLOduration=3.8994176019999998 podStartE2EDuration="48.631412356s" podCreationTimestamp="2025-10-08 07:33:17 +0000 UTC" firstStartedPulling="2025-10-08 07:33:19.12871953 +0000 UTC m=+984.499684455" lastFinishedPulling="2025-10-08 07:34:03.860714274 +0000 UTC m=+1029.231679209" observedRunningTime="2025-10-08 07:34:05.623418425 +0000 UTC m=+1030.994383360" watchObservedRunningTime="2025-10-08 07:34:05.631412356 +0000 UTC m=+1031.002377291" Oct 08 07:34:06 crc kubenswrapper[4693]: I1008 07:34:06.438393 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-78f9f7b5dd-wdpgb"] Oct 08 07:34:06 crc kubenswrapper[4693]: I1008 07:34:06.625882 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-hg885" event={"ID":"6246bdc3-ea30-4c42-ba72-bcee44aa057f","Type":"ContainerStarted","Data":"991c2d2355b65c138aa7bec6609769fb71b2110caed5d334b7567e75028a23a2"} Oct 08 07:34:06 crc kubenswrapper[4693]: I1008 07:34:06.626764 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85ff748b95-hg885" Oct 08 07:34:06 crc kubenswrapper[4693]: I1008 07:34:06.637880 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-69584f958c-5wpzz" event={"ID":"f74783b1-e062-4c4f-82eb-a7df2387913d","Type":"ContainerStarted","Data":"e35d2af5ab8befa20421981cb9a925e5001763a8f041f33f9a7adc38d1536cd7"} Oct 08 07:34:06 crc kubenswrapper[4693]: I1008 07:34:06.637943 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-69584f958c-5wpzz" event={"ID":"f74783b1-e062-4c4f-82eb-a7df2387913d","Type":"ContainerStarted","Data":"ff6f488c9afee0ca7553e4cc229e214f105d9858765ce5a4a26850a6eedaf1e3"} Oct 08 07:34:06 crc kubenswrapper[4693]: I1008 07:34:06.644111 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78f9f7b5dd-wdpgb" event={"ID":"bae6b42f-5a3c-4568-b8db-84be1514827e","Type":"ContainerStarted","Data":"c51d86192e52f1e2b7eb26d9410ade5b5ab8015f2f40e3fdd2cfadb1be855086"} Oct 08 07:34:06 crc kubenswrapper[4693]: I1008 07:34:06.649728 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85ff748b95-hg885" podStartSLOduration=5.649711572 podStartE2EDuration="5.649711572s" podCreationTimestamp="2025-10-08 07:34:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:34:06.648333546 +0000 UTC m=+1032.019298481" watchObservedRunningTime="2025-10-08 07:34:06.649711572 +0000 UTC m=+1032.020676507" Oct 08 07:34:06 crc kubenswrapper[4693]: I1008 07:34:06.659564 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq" event={"ID":"f3bebe35-d072-4368-ba55-d8415a4f44ef","Type":"ContainerStarted","Data":"fea5a504708d98b6d774e0df5002553be14606eacd12a810e3cba0c49a6569a1"} Oct 08 07:34:06 crc kubenswrapper[4693]: I1008 07:34:06.659624 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq" event={"ID":"f3bebe35-d072-4368-ba55-d8415a4f44ef","Type":"ContainerStarted","Data":"fafc50020d90c426e5b1b3119d5b63165742f8b31ae1d893a3041a0c9af6ad00"} Oct 08 07:34:06 crc kubenswrapper[4693]: I1008 07:34:06.678090 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-69584f958c-5wpzz" podStartSLOduration=2.414625468 podStartE2EDuration="5.6780573s" podCreationTimestamp="2025-10-08 07:34:01 +0000 UTC" firstStartedPulling="2025-10-08 07:34:02.592693201 +0000 UTC m=+1027.963658136" lastFinishedPulling="2025-10-08 07:34:05.856125033 +0000 UTC m=+1031.227089968" observedRunningTime="2025-10-08 07:34:06.673434048 +0000 UTC m=+1032.044398983" watchObservedRunningTime="2025-10-08 07:34:06.6780573 +0000 UTC m=+1032.049022235" Oct 08 07:34:06 crc kubenswrapper[4693]: I1008 07:34:06.694461 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7d588c7bd-f7dbq" podStartSLOduration=2.490609762 podStartE2EDuration="5.694443922s" podCreationTimestamp="2025-10-08 07:34:01 +0000 UTC" firstStartedPulling="2025-10-08 07:34:02.655888518 +0000 UTC m=+1028.026853453" lastFinishedPulling="2025-10-08 07:34:05.859722678 +0000 UTC m=+1031.230687613" observedRunningTime="2025-10-08 07:34:06.689983965 +0000 UTC m=+1032.060948900" watchObservedRunningTime="2025-10-08 07:34:06.694443922 +0000 UTC m=+1032.065408867" Oct 08 07:34:07 crc kubenswrapper[4693]: I1008 07:34:07.670128 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78f9f7b5dd-wdpgb" event={"ID":"bae6b42f-5a3c-4568-b8db-84be1514827e","Type":"ContainerStarted","Data":"ddd3ff492e24d2ce21288435e0795c65fbfed137bb13729a926de54a80d59314"} Oct 08 07:34:07 crc kubenswrapper[4693]: I1008 07:34:07.672015 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78f9f7b5dd-wdpgb" event={"ID":"bae6b42f-5a3c-4568-b8db-84be1514827e","Type":"ContainerStarted","Data":"7a8b7bda2c6031d396b686cabeb83772dcb043747308565b832124caca36566b"} Oct 08 07:34:07 crc kubenswrapper[4693]: I1008 07:34:07.698496 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-78f9f7b5dd-wdpgb" podStartSLOduration=3.698479944 podStartE2EDuration="3.698479944s" podCreationTimestamp="2025-10-08 07:34:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:34:07.690786881 +0000 UTC m=+1033.061751826" watchObservedRunningTime="2025-10-08 07:34:07.698479944 +0000 UTC m=+1033.069444879" Oct 08 07:34:08 crc kubenswrapper[4693]: I1008 07:34:08.682829 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:08 crc kubenswrapper[4693]: I1008 07:34:08.684491 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:09 crc kubenswrapper[4693]: I1008 07:34:09.691869 4693 generic.go:334] "Generic (PLEG): container finished" podID="e1bdcd99-a53d-45ee-b439-57c0e0025fb9" 
containerID="e6d6086133e430d57e92411d11b7b846138ac1be139229c339e52287578db8e0" exitCode=0 Oct 08 07:34:09 crc kubenswrapper[4693]: I1008 07:34:09.691919 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-r7p2j" event={"ID":"e1bdcd99-a53d-45ee-b439-57c0e0025fb9","Type":"ContainerDied","Data":"e6d6086133e430d57e92411d11b7b846138ac1be139229c339e52287578db8e0"} Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.568771 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.694588 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-combined-ca-bundle\") pod \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.695185 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdmgt\" (UniqueName: \"kubernetes.io/projected/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-kube-api-access-gdmgt\") pod \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.695328 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-scripts\") pod \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.695408 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-db-sync-config-data\") pod \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.695446 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-config-data\") pod \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.695500 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-etc-machine-id\") pod \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\" (UID: \"e1bdcd99-a53d-45ee-b439-57c0e0025fb9\") " Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.696677 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e1bdcd99-a53d-45ee-b439-57c0e0025fb9" (UID: "e1bdcd99-a53d-45ee-b439-57c0e0025fb9"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.701097 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-scripts" (OuterVolumeSpecName: "scripts") pod "e1bdcd99-a53d-45ee-b439-57c0e0025fb9" (UID: "e1bdcd99-a53d-45ee-b439-57c0e0025fb9"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.702994 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-kube-api-access-gdmgt" (OuterVolumeSpecName: "kube-api-access-gdmgt") pod "e1bdcd99-a53d-45ee-b439-57c0e0025fb9" (UID: "e1bdcd99-a53d-45ee-b439-57c0e0025fb9"). InnerVolumeSpecName "kube-api-access-gdmgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.708964 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e1bdcd99-a53d-45ee-b439-57c0e0025fb9" (UID: "e1bdcd99-a53d-45ee-b439-57c0e0025fb9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.730913 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-647ccf6b96-zrz9s" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.740869 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-r7p2j" event={"ID":"e1bdcd99-a53d-45ee-b439-57c0e0025fb9","Type":"ContainerDied","Data":"ee360127a2ec152e3e98351c393aa470cbbebe6c719cffc6b7a7e0e6cbb5ca3b"} Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.741069 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee360127a2ec152e3e98351c393aa470cbbebe6c719cffc6b7a7e0e6cbb5ca3b" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.741278 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-r7p2j" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.743766 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1bdcd99-a53d-45ee-b439-57c0e0025fb9" (UID: "e1bdcd99-a53d-45ee-b439-57c0e0025fb9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.777310 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-config-data" (OuterVolumeSpecName: "config-data") pod "e1bdcd99-a53d-45ee-b439-57c0e0025fb9" (UID: "e1bdcd99-a53d-45ee-b439-57c0e0025fb9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.797753 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.797785 4693 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.797796 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.797805 4693 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.798486 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.798527 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdmgt\" (UniqueName: \"kubernetes.io/projected/e1bdcd99-a53d-45ee-b439-57c0e0025fb9-kube-api-access-gdmgt\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.803433 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-786b4cdb4-z6p8n" podUID="1f26734d-12eb-4c6c-9e68-254a30cea3b6" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Oct 08 07:34:11 crc kubenswrapper[4693]: E1008 07:34:11.882595 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.994806 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 08 07:34:11 crc kubenswrapper[4693]: E1008 07:34:11.995205 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1bdcd99-a53d-45ee-b439-57c0e0025fb9" containerName="cinder-db-sync" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.995221 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1bdcd99-a53d-45ee-b439-57c0e0025fb9" containerName="cinder-db-sync" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.995394 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1bdcd99-a53d-45ee-b439-57c0e0025fb9" containerName="cinder-db-sync" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.996319 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 08 07:34:11 crc kubenswrapper[4693]: I1008 07:34:11.999203 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.022237 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.063936 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-85ff748b95-hg885" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.104667 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-config-data\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.104726 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-scripts\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.104751 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a5fbb923-ed64-4259-8d7c-9d19b190c785-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.104776 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.104802 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.104837 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqm5z\" (UniqueName: \"kubernetes.io/projected/a5fbb923-ed64-4259-8d7c-9d19b190c785-kube-api-access-hqm5z\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.136705 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-hg885"] Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.207015 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-config-data\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.207280 4693 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-scripts\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.207398 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a5fbb923-ed64-4259-8d7c-9d19b190c785-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.207500 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.207580 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.207659 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqm5z\" (UniqueName: \"kubernetes.io/projected/a5fbb923-ed64-4259-8d7c-9d19b190c785-kube-api-access-hqm5z\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.209487 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-djbzk"] Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.213954 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a5fbb923-ed64-4259-8d7c-9d19b190c785-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.217461 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-scripts\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.217776 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.220879 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.224183 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.224630 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-config-data\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.230468 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-djbzk"] Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.267383 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqm5z\" (UniqueName: \"kubernetes.io/projected/a5fbb923-ed64-4259-8d7c-9d19b190c785-kube-api-access-hqm5z\") pod \"cinder-scheduler-0\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.272871 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.274659 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.277804 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.310154 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49slt\" (UniqueName: \"kubernetes.io/projected/17141a5d-2e95-4670-9852-ef9ba4e6fb77-kube-api-access-49slt\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.310214 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.310257 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.310276 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.310301 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-config\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.310315 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.316367 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.321543 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413622 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413658 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-logs\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413721 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49slt\" (UniqueName: \"kubernetes.io/projected/17141a5d-2e95-4670-9852-ef9ba4e6fb77-kube-api-access-49slt\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413758 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413790 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413806 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-config-data-custom\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413864 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413880 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-config-data\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413901 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-config\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413916 4693 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413933 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgh2d\" (UniqueName: \"kubernetes.io/projected/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-kube-api-access-mgh2d\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413948 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-scripts\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.413982 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.423480 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.423549 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.424049 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.424493 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.425035 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-config\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.444083 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49slt\" (UniqueName: 
\"kubernetes.io/projected/17141a5d-2e95-4670-9852-ef9ba4e6fb77-kube-api-access-49slt\") pod \"dnsmasq-dns-5c9776ccc5-djbzk\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.517792 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.518097 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-logs\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.518241 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-config-data-custom\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.518263 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-config-data\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.518298 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgh2d\" (UniqueName: \"kubernetes.io/projected/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-kube-api-access-mgh2d\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.518315 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-scripts\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.518366 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.520143 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.520417 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-logs\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.524457 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-config-data\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.530639 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.548454 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgh2d\" (UniqueName: \"kubernetes.io/projected/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-kube-api-access-mgh2d\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.549121 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.549918 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-scripts\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.557511 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-config-data-custom\") pod \"cinder-api-0\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.609876 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.782300 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85ff748b95-hg885" podUID="6246bdc3-ea30-4c42-ba72-bcee44aa057f" containerName="dnsmasq-dns" containerID="cri-o://991c2d2355b65c138aa7bec6609769fb71b2110caed5d334b7567e75028a23a2" gracePeriod=10 Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.782671 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerName="ceilometer-notification-agent" containerID="cri-o://2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a" gracePeriod=30 Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.782731 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerName="sg-core" containerID="cri-o://8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6" gracePeriod=30 Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.782751 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerName="proxy-httpd" containerID="cri-o://7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70" gracePeriod=30 Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.782771 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"019964d2-ae2a-4501-ad53-c6c0b2260a32","Type":"ContainerStarted","Data":"7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70"} Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.782959 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 08 07:34:12 crc kubenswrapper[4693]: I1008 07:34:12.889577 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 08 07:34:13 crc kubenswrapper[4693]: I1008 07:34:13.119515 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-djbzk"] Oct 08 07:34:13 crc kubenswrapper[4693]: I1008 07:34:13.231450 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 08 07:34:13 crc kubenswrapper[4693]: I1008 07:34:13.793970 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a5fbb923-ed64-4259-8d7c-9d19b190c785","Type":"ContainerStarted","Data":"d48c4dc770abba6db43ad7e0f125858a5854b31511abbb35323183c47301c8fd"} Oct 08 07:34:13 crc kubenswrapper[4693]: I1008 07:34:13.796920 4693 generic.go:334] "Generic (PLEG): container finished" podID="6246bdc3-ea30-4c42-ba72-bcee44aa057f" containerID="991c2d2355b65c138aa7bec6609769fb71b2110caed5d334b7567e75028a23a2" exitCode=0 Oct 08 07:34:13 crc kubenswrapper[4693]: I1008 07:34:13.796987 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-hg885" event={"ID":"6246bdc3-ea30-4c42-ba72-bcee44aa057f","Type":"ContainerDied","Data":"991c2d2355b65c138aa7bec6609769fb71b2110caed5d334b7567e75028a23a2"} Oct 08 07:34:13 crc kubenswrapper[4693]: I1008 07:34:13.798420 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" 
event={"ID":"17141a5d-2e95-4670-9852-ef9ba4e6fb77","Type":"ContainerStarted","Data":"0513da7ac1d68f8baac8bb49f5df1f5a14e91fa89bbf04bbf71856b753002fae"} Oct 08 07:34:13 crc kubenswrapper[4693]: I1008 07:34:13.804627 4693 generic.go:334] "Generic (PLEG): container finished" podID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerID="7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70" exitCode=0 Oct 08 07:34:13 crc kubenswrapper[4693]: I1008 07:34:13.804658 4693 generic.go:334] "Generic (PLEG): container finished" podID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerID="8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6" exitCode=2 Oct 08 07:34:13 crc kubenswrapper[4693]: I1008 07:34:13.804677 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"019964d2-ae2a-4501-ad53-c6c0b2260a32","Type":"ContainerDied","Data":"7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70"} Oct 08 07:34:13 crc kubenswrapper[4693]: I1008 07:34:13.804700 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"019964d2-ae2a-4501-ad53-c6c0b2260a32","Type":"ContainerDied","Data":"8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6"} Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.027195 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-785c8f8986-64hh9" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.210675 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.553670 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-hg885" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.672602 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-config\") pod \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.672802 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-ovsdbserver-sb\") pod \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.672915 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-ovsdbserver-nb\") pod \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.672954 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhzfl\" (UniqueName: \"kubernetes.io/projected/6246bdc3-ea30-4c42-ba72-bcee44aa057f-kube-api-access-bhzfl\") pod \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.672975 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-dns-svc\") pod \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " Oct 08 07:34:14 crc 
kubenswrapper[4693]: I1008 07:34:14.673064 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-dns-swift-storage-0\") pod \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\" (UID: \"6246bdc3-ea30-4c42-ba72-bcee44aa057f\") " Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.683559 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6246bdc3-ea30-4c42-ba72-bcee44aa057f-kube-api-access-bhzfl" (OuterVolumeSpecName: "kube-api-access-bhzfl") pod "6246bdc3-ea30-4c42-ba72-bcee44aa057f" (UID: "6246bdc3-ea30-4c42-ba72-bcee44aa057f"). InnerVolumeSpecName "kube-api-access-bhzfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.742037 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-config" (OuterVolumeSpecName: "config") pod "6246bdc3-ea30-4c42-ba72-bcee44aa057f" (UID: "6246bdc3-ea30-4c42-ba72-bcee44aa057f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.775560 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhzfl\" (UniqueName: \"kubernetes.io/projected/6246bdc3-ea30-4c42-ba72-bcee44aa057f-kube-api-access-bhzfl\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.775591 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.781325 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6246bdc3-ea30-4c42-ba72-bcee44aa057f" (UID: "6246bdc3-ea30-4c42-ba72-bcee44aa057f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.786996 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6246bdc3-ea30-4c42-ba72-bcee44aa057f" (UID: "6246bdc3-ea30-4c42-ba72-bcee44aa057f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.792074 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6246bdc3-ea30-4c42-ba72-bcee44aa057f" (UID: "6246bdc3-ea30-4c42-ba72-bcee44aa057f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.792657 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-785c8f8986-64hh9" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.825274 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6246bdc3-ea30-4c42-ba72-bcee44aa057f" (UID: "6246bdc3-ea30-4c42-ba72-bcee44aa057f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.866301 4693 generic.go:334] "Generic (PLEG): container finished" podID="17141a5d-2e95-4670-9852-ef9ba4e6fb77" containerID="ada8cadfe9172ba72d823ca5bba6db93adb347eb8642aac1c9df98bc9f08398a" exitCode=0 Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.866380 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" event={"ID":"17141a5d-2e95-4670-9852-ef9ba4e6fb77","Type":"ContainerDied","Data":"ada8cadfe9172ba72d823ca5bba6db93adb347eb8642aac1c9df98bc9f08398a"} Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.874007 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8cdf4823-1b2c-44d9-bce9-5da6e5b87999","Type":"ContainerStarted","Data":"01f0bfc395b7ce934cdd4f6b3ce34ff3702725b37b08e24fc4274576bdb0e259"} Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.876863 4693 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.876881 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.876890 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.876907 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6246bdc3-ea30-4c42-ba72-bcee44aa057f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.878623 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-hg885" Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.878724 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-hg885" event={"ID":"6246bdc3-ea30-4c42-ba72-bcee44aa057f","Type":"ContainerDied","Data":"a0adaf76b5b183bc1f61c95609b80b1bd96fa0f00a223ce74ab9b55694f0ee8e"} Oct 08 07:34:14 crc kubenswrapper[4693]: I1008 07:34:14.878789 4693 scope.go:117] "RemoveContainer" containerID="991c2d2355b65c138aa7bec6609769fb71b2110caed5d334b7567e75028a23a2" Oct 08 07:34:15 crc kubenswrapper[4693]: I1008 07:34:15.000032 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-hg885"] Oct 08 07:34:15 crc kubenswrapper[4693]: I1008 07:34:15.024118 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-hg885"] Oct 08 07:34:15 crc kubenswrapper[4693]: I1008 07:34:15.079105 4693 scope.go:117] "RemoveContainer" containerID="accbba6b3446310167a262dbe1155ece68e8a14b9fb1586632e34bd0c1d0e5d8" Oct 08 07:34:15 crc kubenswrapper[4693]: I1008 07:34:15.436089 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6246bdc3-ea30-4c42-ba72-bcee44aa057f" path="/var/lib/kubelet/pods/6246bdc3-ea30-4c42-ba72-bcee44aa057f/volumes" Oct 08 07:34:15 crc kubenswrapper[4693]: I1008 07:34:15.901609 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8cdf4823-1b2c-44d9-bce9-5da6e5b87999","Type":"ContainerStarted","Data":"5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3"} Oct 08 07:34:15 crc kubenswrapper[4693]: I1008 07:34:15.904469 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a5fbb923-ed64-4259-8d7c-9d19b190c785","Type":"ContainerStarted","Data":"6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f"} Oct 08 07:34:15 crc kubenswrapper[4693]: I1008 07:34:15.914893 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" event={"ID":"17141a5d-2e95-4670-9852-ef9ba4e6fb77","Type":"ContainerStarted","Data":"e276d37c9a586c2227b56ecf8d80f65d6b6a4df5947a099b1d46c79b7e047c16"} Oct 08 07:34:15 crc kubenswrapper[4693]: I1008 07:34:15.915081 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.682516 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.720730 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" podStartSLOduration=4.720709365 podStartE2EDuration="4.720709365s" podCreationTimestamp="2025-10-08 07:34:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:34:15.932586149 +0000 UTC m=+1041.303551084" watchObservedRunningTime="2025-10-08 07:34:16.720709365 +0000 UTC m=+1042.091674300" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.721190 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.831376 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/019964d2-ae2a-4501-ad53-c6c0b2260a32-run-httpd\") pod \"019964d2-ae2a-4501-ad53-c6c0b2260a32\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.831725 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/019964d2-ae2a-4501-ad53-c6c0b2260a32-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "019964d2-ae2a-4501-ad53-c6c0b2260a32" (UID: "019964d2-ae2a-4501-ad53-c6c0b2260a32"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.831840 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6ff9\" (UniqueName: \"kubernetes.io/projected/019964d2-ae2a-4501-ad53-c6c0b2260a32-kube-api-access-b6ff9\") pod \"019964d2-ae2a-4501-ad53-c6c0b2260a32\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.831944 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/019964d2-ae2a-4501-ad53-c6c0b2260a32-log-httpd\") pod \"019964d2-ae2a-4501-ad53-c6c0b2260a32\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.832100 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-combined-ca-bundle\") pod \"019964d2-ae2a-4501-ad53-c6c0b2260a32\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.832275 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-config-data\") pod \"019964d2-ae2a-4501-ad53-c6c0b2260a32\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.832371 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-sg-core-conf-yaml\") pod \"019964d2-ae2a-4501-ad53-c6c0b2260a32\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.832448 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-scripts\") pod \"019964d2-ae2a-4501-ad53-c6c0b2260a32\" (UID: \"019964d2-ae2a-4501-ad53-c6c0b2260a32\") " Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.832268 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/019964d2-ae2a-4501-ad53-c6c0b2260a32-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "019964d2-ae2a-4501-ad53-c6c0b2260a32" (UID: "019964d2-ae2a-4501-ad53-c6c0b2260a32"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.832957 4693 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/019964d2-ae2a-4501-ad53-c6c0b2260a32-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.833056 4693 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/019964d2-ae2a-4501-ad53-c6c0b2260a32-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.852022 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/019964d2-ae2a-4501-ad53-c6c0b2260a32-kube-api-access-b6ff9" (OuterVolumeSpecName: "kube-api-access-b6ff9") pod "019964d2-ae2a-4501-ad53-c6c0b2260a32" (UID: "019964d2-ae2a-4501-ad53-c6c0b2260a32"). InnerVolumeSpecName "kube-api-access-b6ff9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.858957 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-scripts" (OuterVolumeSpecName: "scripts") pod "019964d2-ae2a-4501-ad53-c6c0b2260a32" (UID: "019964d2-ae2a-4501-ad53-c6c0b2260a32"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.901993 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "019964d2-ae2a-4501-ad53-c6c0b2260a32" (UID: "019964d2-ae2a-4501-ad53-c6c0b2260a32"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.935948 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "019964d2-ae2a-4501-ad53-c6c0b2260a32" (UID: "019964d2-ae2a-4501-ad53-c6c0b2260a32"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.936139 4693 generic.go:334] "Generic (PLEG): container finished" podID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerID="2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a" exitCode=0 Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.936268 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"019964d2-ae2a-4501-ad53-c6c0b2260a32","Type":"ContainerDied","Data":"2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a"} Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.936319 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"019964d2-ae2a-4501-ad53-c6c0b2260a32","Type":"ContainerDied","Data":"e2985ddb30e6886812d13f7075db5e5b21e23c8425eaeb3f0fd98c4ebe00a316"} Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.936338 4693 scope.go:117] "RemoveContainer" containerID="7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.936281 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.937736 4693 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.937921 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.937989 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6ff9\" (UniqueName: \"kubernetes.io/projected/019964d2-ae2a-4501-ad53-c6c0b2260a32-kube-api-access-b6ff9\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.938047 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.940848 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8cdf4823-1b2c-44d9-bce9-5da6e5b87999","Type":"ContainerStarted","Data":"85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e"} Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.941074 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8cdf4823-1b2c-44d9-bce9-5da6e5b87999" containerName="cinder-api-log" containerID="cri-o://5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3" gracePeriod=30 Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.941462 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.941827 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8cdf4823-1b2c-44d9-bce9-5da6e5b87999" containerName="cinder-api" containerID="cri-o://85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e" gracePeriod=30 Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.953907 4693 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a5fbb923-ed64-4259-8d7c-9d19b190c785","Type":"ContainerStarted","Data":"0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11"} Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.982657 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-config-data" (OuterVolumeSpecName: "config-data") pod "019964d2-ae2a-4501-ad53-c6c0b2260a32" (UID: "019964d2-ae2a-4501-ad53-c6c0b2260a32"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:16 crc kubenswrapper[4693]: I1008 07:34:16.999571 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.999552931 podStartE2EDuration="4.999552931s" podCreationTimestamp="2025-10-08 07:34:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:34:16.962979494 +0000 UTC m=+1042.333944429" watchObservedRunningTime="2025-10-08 07:34:16.999552931 +0000 UTC m=+1042.370517856" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.001248 4693 scope.go:117] "RemoveContainer" containerID="8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.005581 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.540233017 podStartE2EDuration="6.00556966s" podCreationTimestamp="2025-10-08 07:34:11 +0000 UTC" firstStartedPulling="2025-10-08 07:34:12.978036999 +0000 UTC m=+1038.349001934" lastFinishedPulling="2025-10-08 07:34:14.443373622 +0000 UTC m=+1039.814338577" observedRunningTime="2025-10-08 07:34:16.980384707 +0000 UTC m=+1042.351349642" watchObservedRunningTime="2025-10-08 07:34:17.00556966 +0000 UTC m=+1042.376534605" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.023365 4693 scope.go:117] "RemoveContainer" containerID="2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.039860 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/019964d2-ae2a-4501-ad53-c6c0b2260a32-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.042132 4693 scope.go:117] "RemoveContainer" containerID="7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70" Oct 08 07:34:17 crc kubenswrapper[4693]: E1008 07:34:17.042600 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70\": container with ID starting with 7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70 not found: ID does not exist" containerID="7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.042638 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70"} err="failed to get container status \"7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70\": rpc error: code = NotFound desc = could not find container 
\"7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70\": container with ID starting with 7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70 not found: ID does not exist" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.042665 4693 scope.go:117] "RemoveContainer" containerID="8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6" Oct 08 07:34:17 crc kubenswrapper[4693]: E1008 07:34:17.043050 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6\": container with ID starting with 8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6 not found: ID does not exist" containerID="8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.043079 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6"} err="failed to get container status \"8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6\": rpc error: code = NotFound desc = could not find container \"8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6\": container with ID starting with 8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6 not found: ID does not exist" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.043101 4693 scope.go:117] "RemoveContainer" containerID="2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a" Oct 08 07:34:17 crc kubenswrapper[4693]: E1008 07:34:17.043411 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a\": container with ID starting with 2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a not found: ID does not exist" containerID="2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.043429 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a"} err="failed to get container status \"2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a\": rpc error: code = NotFound desc = could not find container \"2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a\": container with ID starting with 2696cae1cd9f5a2788793af3e3f4fd8afcfc002dfd3117070c05fa4d9af2521a not found: ID does not exist" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.321933 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.360926 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.428712 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-78f9f7b5dd-wdpgb" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.431422 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.440923 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:34:17 crc kubenswrapper[4693]: E1008 07:34:17.441520 4693 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6246bdc3-ea30-4c42-ba72-bcee44aa057f" containerName="dnsmasq-dns" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.441536 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="6246bdc3-ea30-4c42-ba72-bcee44aa057f" containerName="dnsmasq-dns" Oct 08 07:34:17 crc kubenswrapper[4693]: E1008 07:34:17.441549 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6246bdc3-ea30-4c42-ba72-bcee44aa057f" containerName="init" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.441556 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="6246bdc3-ea30-4c42-ba72-bcee44aa057f" containerName="init" Oct 08 07:34:17 crc kubenswrapper[4693]: E1008 07:34:17.441576 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerName="proxy-httpd" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.441583 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerName="proxy-httpd" Oct 08 07:34:17 crc kubenswrapper[4693]: E1008 07:34:17.441601 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerName="ceilometer-notification-agent" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.441607 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerName="ceilometer-notification-agent" Oct 08 07:34:17 crc kubenswrapper[4693]: E1008 07:34:17.441617 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerName="sg-core" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.441624 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerName="sg-core" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.441791 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerName="proxy-httpd" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.441830 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerName="ceilometer-notification-agent" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.441839 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" containerName="sg-core" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.441849 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="6246bdc3-ea30-4c42-ba72-bcee44aa057f" containerName="dnsmasq-dns" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.443466 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.447527 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.450029 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.451063 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.515835 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-785c8f8986-64hh9"] Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.516242 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-785c8f8986-64hh9" podUID="b9f4f1ac-3f4d-470e-95ee-567268c5ad43" containerName="barbican-api" containerID="cri-o://4de8c1d29643d97d308a75c26e8a1e4aadfe7216cac9eda538604c4010815ab5" gracePeriod=30 Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.516083 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-785c8f8986-64hh9" podUID="b9f4f1ac-3f4d-470e-95ee-567268c5ad43" containerName="barbican-api-log" containerID="cri-o://a41fdc74774d8175b31274e2e0ca63f06684adf355b53e4c3d3d2ac63d7fbdd0" gracePeriod=30 Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.555378 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.555422 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35069ebb-6578-40d8-b9c6-5183fee2f040-run-httpd\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.555450 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35069ebb-6578-40d8-b9c6-5183fee2f040-log-httpd\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.555487 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.555564 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-scripts\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.555584 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz6g5\" (UniqueName: 
\"kubernetes.io/projected/35069ebb-6578-40d8-b9c6-5183fee2f040-kube-api-access-bz6g5\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.555628 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-config-data\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.588680 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.656630 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgh2d\" (UniqueName: \"kubernetes.io/projected/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-kube-api-access-mgh2d\") pod \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.656688 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-etc-machine-id\") pod \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.656883 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-config-data-custom\") pod \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.656942 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-logs\") pod \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.657017 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-config-data\") pod \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.657049 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-combined-ca-bundle\") pod \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.657068 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-scripts\") pod \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\" (UID: \"8cdf4823-1b2c-44d9-bce9-5da6e5b87999\") " Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.657272 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 
07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.657294 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35069ebb-6578-40d8-b9c6-5183fee2f040-run-httpd\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.657320 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35069ebb-6578-40d8-b9c6-5183fee2f040-log-httpd\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.657348 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.657408 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-scripts\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.657428 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz6g5\" (UniqueName: \"kubernetes.io/projected/35069ebb-6578-40d8-b9c6-5183fee2f040-kube-api-access-bz6g5\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.657465 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-config-data\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.659362 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35069ebb-6578-40d8-b9c6-5183fee2f040-log-httpd\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.663248 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8cdf4823-1b2c-44d9-bce9-5da6e5b87999" (UID: "8cdf4823-1b2c-44d9-bce9-5da6e5b87999"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.663295 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8cdf4823-1b2c-44d9-bce9-5da6e5b87999" (UID: "8cdf4823-1b2c-44d9-bce9-5da6e5b87999"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.664282 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-scripts" (OuterVolumeSpecName: "scripts") pod "8cdf4823-1b2c-44d9-bce9-5da6e5b87999" (UID: "8cdf4823-1b2c-44d9-bce9-5da6e5b87999"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.665780 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-config-data\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.666040 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-logs" (OuterVolumeSpecName: "logs") pod "8cdf4823-1b2c-44d9-bce9-5da6e5b87999" (UID: "8cdf4823-1b2c-44d9-bce9-5da6e5b87999"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.666089 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35069ebb-6578-40d8-b9c6-5183fee2f040-run-httpd\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.669435 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.669538 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-scripts\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.669662 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-kube-api-access-mgh2d" (OuterVolumeSpecName: "kube-api-access-mgh2d") pod "8cdf4823-1b2c-44d9-bce9-5da6e5b87999" (UID: "8cdf4823-1b2c-44d9-bce9-5da6e5b87999"). InnerVolumeSpecName "kube-api-access-mgh2d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.670129 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.693314 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz6g5\" (UniqueName: \"kubernetes.io/projected/35069ebb-6578-40d8-b9c6-5183fee2f040-kube-api-access-bz6g5\") pod \"ceilometer-0\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.727052 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8cdf4823-1b2c-44d9-bce9-5da6e5b87999" (UID: "8cdf4823-1b2c-44d9-bce9-5da6e5b87999"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.748457 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-config-data" (OuterVolumeSpecName: "config-data") pod "8cdf4823-1b2c-44d9-bce9-5da6e5b87999" (UID: "8cdf4823-1b2c-44d9-bce9-5da6e5b87999"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.759239 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.759267 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.759278 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgh2d\" (UniqueName: \"kubernetes.io/projected/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-kube-api-access-mgh2d\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.759288 4693 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.759296 4693 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.759305 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.759314 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cdf4823-1b2c-44d9-bce9-5da6e5b87999-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 
07:34:17.796518 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.961931 4693 generic.go:334] "Generic (PLEG): container finished" podID="b9f4f1ac-3f4d-470e-95ee-567268c5ad43" containerID="a41fdc74774d8175b31274e2e0ca63f06684adf355b53e4c3d3d2ac63d7fbdd0" exitCode=143 Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.962104 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-785c8f8986-64hh9" event={"ID":"b9f4f1ac-3f4d-470e-95ee-567268c5ad43","Type":"ContainerDied","Data":"a41fdc74774d8175b31274e2e0ca63f06684adf355b53e4c3d3d2ac63d7fbdd0"} Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.964613 4693 generic.go:334] "Generic (PLEG): container finished" podID="8cdf4823-1b2c-44d9-bce9-5da6e5b87999" containerID="85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e" exitCode=0 Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.964626 4693 generic.go:334] "Generic (PLEG): container finished" podID="8cdf4823-1b2c-44d9-bce9-5da6e5b87999" containerID="5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3" exitCode=143 Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.964676 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.964663 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8cdf4823-1b2c-44d9-bce9-5da6e5b87999","Type":"ContainerDied","Data":"85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e"} Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.964744 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8cdf4823-1b2c-44d9-bce9-5da6e5b87999","Type":"ContainerDied","Data":"5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3"} Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.964762 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8cdf4823-1b2c-44d9-bce9-5da6e5b87999","Type":"ContainerDied","Data":"01f0bfc395b7ce934cdd4f6b3ce34ff3702725b37b08e24fc4274576bdb0e259"} Oct 08 07:34:17 crc kubenswrapper[4693]: I1008 07:34:17.964779 4693 scope.go:117] "RemoveContainer" containerID="85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.016162 4693 scope.go:117] "RemoveContainer" containerID="5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.027827 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.047989 4693 scope.go:117] "RemoveContainer" containerID="85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e" Oct 08 07:34:18 crc kubenswrapper[4693]: E1008 07:34:18.050913 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e\": container with ID starting with 85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e not found: ID does not exist" containerID="85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.050952 4693 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e"} err="failed to get container status \"85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e\": rpc error: code = NotFound desc = could not find container \"85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e\": container with ID starting with 85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e not found: ID does not exist" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.050993 4693 scope.go:117] "RemoveContainer" containerID="5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3" Oct 08 07:34:18 crc kubenswrapper[4693]: E1008 07:34:18.051312 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3\": container with ID starting with 5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3 not found: ID does not exist" containerID="5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.051361 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3"} err="failed to get container status \"5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3\": rpc error: code = NotFound desc = could not find container \"5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3\": container with ID starting with 5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3 not found: ID does not exist" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.051389 4693 scope.go:117] "RemoveContainer" containerID="85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.051675 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e"} err="failed to get container status \"85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e\": rpc error: code = NotFound desc = could not find container \"85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e\": container with ID starting with 85051501159f1026cda76e8a44c237f65b7c8dd9a4135e10e0ac7c2fcb42409e not found: ID does not exist" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.051695 4693 scope.go:117] "RemoveContainer" containerID="5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.051935 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3"} err="failed to get container status \"5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3\": rpc error: code = NotFound desc = could not find container \"5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3\": container with ID starting with 5388b3faed3f126af29ce70d760014810fe640139278794126d1361a2f39d6e3 not found: ID does not exist" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.055375 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.062583 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 08 07:34:18 
crc kubenswrapper[4693]: E1008 07:34:18.063001 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cdf4823-1b2c-44d9-bce9-5da6e5b87999" containerName="cinder-api-log" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.063019 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cdf4823-1b2c-44d9-bce9-5da6e5b87999" containerName="cinder-api-log" Oct 08 07:34:18 crc kubenswrapper[4693]: E1008 07:34:18.063046 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cdf4823-1b2c-44d9-bce9-5da6e5b87999" containerName="cinder-api" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.063054 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cdf4823-1b2c-44d9-bce9-5da6e5b87999" containerName="cinder-api" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.063232 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cdf4823-1b2c-44d9-bce9-5da6e5b87999" containerName="cinder-api-log" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.063254 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cdf4823-1b2c-44d9-bce9-5da6e5b87999" containerName="cinder-api" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.064565 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.068291 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.069434 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.070226 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.072970 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.165766 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.165840 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.165882 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.165898 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-scripts\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.166098 4693 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ebd0e852-8dca-49c3-9af2-00f4d652216e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.166188 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ebd0e852-8dca-49c3-9af2-00f4d652216e-logs\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.166319 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-config-data\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.166362 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-config-data-custom\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.166388 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hmt8\" (UniqueName: \"kubernetes.io/projected/ebd0e852-8dca-49c3-9af2-00f4d652216e-kube-api-access-2hmt8\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.262480 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.268087 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.268137 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.268191 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.268208 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-scripts\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.268255 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/ebd0e852-8dca-49c3-9af2-00f4d652216e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.268277 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ebd0e852-8dca-49c3-9af2-00f4d652216e-logs\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.268315 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-config-data\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.268342 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-config-data-custom\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.268364 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hmt8\" (UniqueName: \"kubernetes.io/projected/ebd0e852-8dca-49c3-9af2-00f4d652216e-kube-api-access-2hmt8\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.269425 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ebd0e852-8dca-49c3-9af2-00f4d652216e-logs\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.269471 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ebd0e852-8dca-49c3-9af2-00f4d652216e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.274198 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-scripts\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.274471 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.274482 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.275610 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-config-data\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.277758 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.282441 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ebd0e852-8dca-49c3-9af2-00f4d652216e-config-data-custom\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.288809 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hmt8\" (UniqueName: \"kubernetes.io/projected/ebd0e852-8dca-49c3-9af2-00f4d652216e-kube-api-access-2hmt8\") pod \"cinder-api-0\" (UID: \"ebd0e852-8dca-49c3-9af2-00f4d652216e\") " pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.394923 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.837501 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.981997 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35069ebb-6578-40d8-b9c6-5183fee2f040","Type":"ContainerStarted","Data":"a2c9d4aa4782462b18cd7219525efe718ef1a7d01f2cae33997f231ff1943361"} Oct 08 07:34:18 crc kubenswrapper[4693]: I1008 07:34:18.985678 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ebd0e852-8dca-49c3-9af2-00f4d652216e","Type":"ContainerStarted","Data":"9c08263e41509698ffa2a94393797a0040661894385b5ecaeaf507ce7d0a0b33"} Oct 08 07:34:19 crc kubenswrapper[4693]: I1008 07:34:19.386745 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="019964d2-ae2a-4501-ad53-c6c0b2260a32" path="/var/lib/kubelet/pods/019964d2-ae2a-4501-ad53-c6c0b2260a32/volumes" Oct 08 07:34:19 crc kubenswrapper[4693]: I1008 07:34:19.389199 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cdf4823-1b2c-44d9-bce9-5da6e5b87999" path="/var/lib/kubelet/pods/8cdf4823-1b2c-44d9-bce9-5da6e5b87999/volumes" Oct 08 07:34:20 crc kubenswrapper[4693]: I1008 07:34:20.006519 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ebd0e852-8dca-49c3-9af2-00f4d652216e","Type":"ContainerStarted","Data":"42937696c4c93bc5a1da694fa1a4ab95eff65e66bc178bb72c9f787f709fae02"} Oct 08 07:34:20 crc kubenswrapper[4693]: I1008 07:34:20.019763 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35069ebb-6578-40d8-b9c6-5183fee2f040","Type":"ContainerStarted","Data":"57cb649cfa6790752324650a73dcf14894d6feeebacf6aed8488675ed3635dbf"} Oct 08 07:34:20 crc kubenswrapper[4693]: I1008 07:34:20.019798 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35069ebb-6578-40d8-b9c6-5183fee2f040","Type":"ContainerStarted","Data":"0565fdd057666d10ff2afcc33108730e90cdffcb42d4b3587c1825446d7217ee"} 
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.049027 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ebd0e852-8dca-49c3-9af2-00f4d652216e","Type":"ContainerStarted","Data":"86dc9bb48a1ea2ae72c90f668d34250f3129953186a608efe2dff538e348b81c"}
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.050727 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.054869 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35069ebb-6578-40d8-b9c6-5183fee2f040","Type":"ContainerStarted","Data":"194798bfd074ca73594f4425588ec43fbe83ca6bd980280ea5f17ce08815f1f3"}
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.058797 4693 generic.go:334] "Generic (PLEG): container finished" podID="b9f4f1ac-3f4d-470e-95ee-567268c5ad43" containerID="4de8c1d29643d97d308a75c26e8a1e4aadfe7216cac9eda538604c4010815ab5" exitCode=0
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.059155 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-785c8f8986-64hh9" event={"ID":"b9f4f1ac-3f4d-470e-95ee-567268c5ad43","Type":"ContainerDied","Data":"4de8c1d29643d97d308a75c26e8a1e4aadfe7216cac9eda538604c4010815ab5"}
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.122365 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.154013 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.153999059 podStartE2EDuration="3.153999059s" podCreationTimestamp="2025-10-08 07:34:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:34:21.083398944 +0000 UTC m=+1046.454363929" watchObservedRunningTime="2025-10-08 07:34:21.153999059 +0000 UTC m=+1046.524963994"
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.237056 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-config-data-custom\") pod \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") "
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.237215 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-config-data\") pod \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") "
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.237281 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-combined-ca-bundle\") pod \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") "
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.237322 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6r7t2\" (UniqueName: \"kubernetes.io/projected/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-kube-api-access-6r7t2\") pod \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") "
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.237362 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-logs\") pod \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\" (UID: \"b9f4f1ac-3f4d-470e-95ee-567268c5ad43\") "
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.238337 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-logs" (OuterVolumeSpecName: "logs") pod "b9f4f1ac-3f4d-470e-95ee-567268c5ad43" (UID: "b9f4f1ac-3f4d-470e-95ee-567268c5ad43"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.243213 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b9f4f1ac-3f4d-470e-95ee-567268c5ad43" (UID: "b9f4f1ac-3f4d-470e-95ee-567268c5ad43"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.257335 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-kube-api-access-6r7t2" (OuterVolumeSpecName: "kube-api-access-6r7t2") pod "b9f4f1ac-3f4d-470e-95ee-567268c5ad43" (UID: "b9f4f1ac-3f4d-470e-95ee-567268c5ad43"). InnerVolumeSpecName "kube-api-access-6r7t2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.265428 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9f4f1ac-3f4d-470e-95ee-567268c5ad43" (UID: "b9f4f1ac-3f4d-470e-95ee-567268c5ad43"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.305457 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-config-data" (OuterVolumeSpecName: "config-data") pod "b9f4f1ac-3f4d-470e-95ee-567268c5ad43" (UID: "b9f4f1ac-3f4d-470e-95ee-567268c5ad43"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.340361 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.340403 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6r7t2\" (UniqueName: \"kubernetes.io/projected/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-kube-api-access-6r7t2\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.340418 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-logs\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.340430 4693 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-config-data-custom\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.340442 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f4f1ac-3f4d-470e-95ee-567268c5ad43-config-data\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:21 crc kubenswrapper[4693]: I1008 07:34:21.716239 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7fd7c44d-xwrpr"
Oct 08 07:34:22 crc kubenswrapper[4693]: I1008 07:34:22.072383 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-785c8f8986-64hh9"
Oct 08 07:34:22 crc kubenswrapper[4693]: I1008 07:34:22.072857 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-785c8f8986-64hh9" event={"ID":"b9f4f1ac-3f4d-470e-95ee-567268c5ad43","Type":"ContainerDied","Data":"5c875d89fbcaee4d31046a68e3866fb8aa8920d18bb62036646675f00b785613"}
Oct 08 07:34:22 crc kubenswrapper[4693]: I1008 07:34:22.072900 4693 scope.go:117] "RemoveContainer" containerID="4de8c1d29643d97d308a75c26e8a1e4aadfe7216cac9eda538604c4010815ab5"
Oct 08 07:34:22 crc kubenswrapper[4693]: I1008 07:34:22.098776 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-785c8f8986-64hh9"]
Oct 08 07:34:22 crc kubenswrapper[4693]: I1008 07:34:22.106914 4693 scope.go:117] "RemoveContainer" containerID="a41fdc74774d8175b31274e2e0ca63f06684adf355b53e4c3d3d2ac63d7fbdd0"
Oct 08 07:34:22 crc kubenswrapper[4693]: I1008 07:34:22.110411 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-785c8f8986-64hh9"]
Oct 08 07:34:22 crc kubenswrapper[4693]: I1008 07:34:22.506514 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Oct 08 07:34:22 crc kubenswrapper[4693]: I1008 07:34:22.539433 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 08 07:34:22 crc kubenswrapper[4693]: I1008 07:34:22.550982 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk"
Oct 08 07:34:22 crc kubenswrapper[4693]: I1008 07:34:22.620135 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-qkfgb"]
Oct 08 07:34:22 crc kubenswrapper[4693]: I1008 07:34:22.620434 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" podUID="c34ee0aa-60c1-4dce-8a22-5415eb6da004" containerName="dnsmasq-dns" containerID="cri-o://fc3c598c4d37e5c2481815949bbebaec782b310b23a8009a7a37554fbc6f2a78" gracePeriod=10
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.110041 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35069ebb-6578-40d8-b9c6-5183fee2f040","Type":"ContainerStarted","Data":"3cad5619d28f9561c8e36a556f4556b1321bae61166da952af5e48a01a7a8052"}
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.112257 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.120480 4693 generic.go:334] "Generic (PLEG): container finished" podID="c34ee0aa-60c1-4dce-8a22-5415eb6da004" containerID="fc3c598c4d37e5c2481815949bbebaec782b310b23a8009a7a37554fbc6f2a78" exitCode=0
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.120746 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="a5fbb923-ed64-4259-8d7c-9d19b190c785" containerName="cinder-scheduler" containerID="cri-o://6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f" gracePeriod=30
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.121051 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" event={"ID":"c34ee0aa-60c1-4dce-8a22-5415eb6da004","Type":"ContainerDied","Data":"fc3c598c4d37e5c2481815949bbebaec782b310b23a8009a7a37554fbc6f2a78"}
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.121087 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" event={"ID":"c34ee0aa-60c1-4dce-8a22-5415eb6da004","Type":"ContainerDied","Data":"238f0b10b09dab492e6ab6447f224a163cf6b07624a01603561338b5c9b0cd03"}
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.121098 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="238f0b10b09dab492e6ab6447f224a163cf6b07624a01603561338b5c9b0cd03"
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.121123 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="a5fbb923-ed64-4259-8d7c-9d19b190c785" containerName="probe" containerID="cri-o://0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11" gracePeriod=30
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.141842 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.606639467 podStartE2EDuration="6.141807449s" podCreationTimestamp="2025-10-08 07:34:17 +0000 UTC" firstStartedPulling="2025-10-08 07:34:18.277564017 +0000 UTC m=+1043.648528962" lastFinishedPulling="2025-10-08 07:34:21.812732009 +0000 UTC m=+1047.183696944" observedRunningTime="2025-10-08 07:34:23.133036246 +0000 UTC m=+1048.504001181" watchObservedRunningTime="2025-10-08 07:34:23.141807449 +0000 UTC m=+1048.512772384"
Oct 08 07:34:23 crc kubenswrapper[4693]: E1008 07:34:23.206530 4693 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ec230f9_e0bd_4760_aeef_08d63ef6b795.slice/crio-5bfe6ac4dcc612359f9b1efc021941edd205ed9e16410e97e5cb63e527810a37: Error finding container 5bfe6ac4dcc612359f9b1efc021941edd205ed9e16410e97e5cb63e527810a37: Status 404 returned error can't find the container with id 5bfe6ac4dcc612359f9b1efc021941edd205ed9e16410e97e5cb63e527810a37
Oct 08 07:34:23 crc kubenswrapper[4693]: W1008 07:34:23.213801 4693 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6246bdc3_ea30_4c42_ba72_bcee44aa057f.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6246bdc3_ea30_4c42_ba72_bcee44aa057f.slice: no such file or directory
Oct 08 07:34:23 crc kubenswrapper[4693]: W1008 07:34:23.213878 4693 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9f4f1ac_3f4d_470e_95ee_567268c5ad43.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9f4f1ac_3f4d_470e_95ee_567268c5ad43.slice: no such file or directory
Oct 08 07:34:23 crc kubenswrapper[4693]: W1008 07:34:23.213918 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod019964d2_ae2a_4501_ad53_c6c0b2260a32.slice/crio-8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6.scope WatchSource:0}: Error finding container 8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6: Status 404 returned error can't find the container with id 8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6
Oct 08 07:34:23 crc kubenswrapper[4693]: W1008 07:34:23.223604 4693 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1bdcd99_a53d_45ee_b439_57c0e0025fb9.slice/crio-conmon-e6d6086133e430d57e92411d11b7b846138ac1be139229c339e52287578db8e0.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1bdcd99_a53d_45ee_b439_57c0e0025fb9.slice/crio-conmon-e6d6086133e430d57e92411d11b7b846138ac1be139229c339e52287578db8e0.scope: no such file or directory
Oct 08 07:34:23 crc kubenswrapper[4693]: W1008 07:34:23.223667 4693 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1bdcd99_a53d_45ee_b439_57c0e0025fb9.slice/crio-e6d6086133e430d57e92411d11b7b846138ac1be139229c339e52287578db8e0.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1bdcd99_a53d_45ee_b439_57c0e0025fb9.slice/crio-e6d6086133e430d57e92411d11b7b846138ac1be139229c339e52287578db8e0.scope: no such file or directory
Oct 08 07:34:23 crc kubenswrapper[4693]: W1008 07:34:23.234727 4693 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod019964d2_ae2a_4501_ad53_c6c0b2260a32.slice/crio-conmon-7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod019964d2_ae2a_4501_ad53_c6c0b2260a32.slice/crio-conmon-7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70.scope: no such file or directory
Oct 08 07:34:23 crc kubenswrapper[4693]: W1008 07:34:23.234797 4693 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod019964d2_ae2a_4501_ad53_c6c0b2260a32.slice/crio-7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod019964d2_ae2a_4501_ad53_c6c0b2260a32.slice/crio-7003a2eddf97581b946ef96192b37075c1eb6454a9cba0f882dbaad917583b70.scope: no such file or directory
Oct 08 07:34:23 crc kubenswrapper[4693]: W1008 07:34:23.254966 4693 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8cdf4823_1b2c_44d9_bce9_5da6e5b87999.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8cdf4823_1b2c_44d9_bce9_5da6e5b87999.slice: no such file or directory
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.303774 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb"
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.373120 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9f4f1ac-3f4d-470e-95ee-567268c5ad43" path="/var/lib/kubelet/pods/b9f4f1ac-3f4d-470e-95ee-567268c5ad43/volumes"
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.379962 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-ovsdbserver-sb\") pod \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") "
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.380620 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-ovsdbserver-nb\") pod \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") "
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.380650 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-config\") pod \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") "
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.380738 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-dns-swift-storage-0\") pod \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") "
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.380789 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmkrc\" (UniqueName: \"kubernetes.io/projected/c34ee0aa-60c1-4dce-8a22-5415eb6da004-kube-api-access-dmkrc\") pod \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") "
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.380877 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-dns-svc\") pod \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") "
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.391979 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c34ee0aa-60c1-4dce-8a22-5415eb6da004-kube-api-access-dmkrc" (OuterVolumeSpecName: "kube-api-access-dmkrc") pod "c34ee0aa-60c1-4dce-8a22-5415eb6da004" (UID: "c34ee0aa-60c1-4dce-8a22-5415eb6da004"). InnerVolumeSpecName "kube-api-access-dmkrc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.425531 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c34ee0aa-60c1-4dce-8a22-5415eb6da004" (UID: "c34ee0aa-60c1-4dce-8a22-5415eb6da004"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.449422 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c34ee0aa-60c1-4dce-8a22-5415eb6da004" (UID: "c34ee0aa-60c1-4dce-8a22-5415eb6da004"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:34:23 crc kubenswrapper[4693]: E1008 07:34:23.462995 4693 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ec230f9_e0bd_4760_aeef_08d63ef6b795.slice/crio-conmon-23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ec230f9_e0bd_4760_aeef_08d63ef6b795.slice/crio-23fdbe5298b1e5fbd8540bd37a1bfde51d58e34ee714890f8d60e70751218414.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ec230f9_e0bd_4760_aeef_08d63ef6b795.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod019964d2_ae2a_4501_ad53_c6c0b2260a32.slice/crio-conmon-8be22c2855ed045547ffb2f962f224c3a8f9ae038511c3220f84ae2ef8ecf6e6.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4d301e9_d078_4876_a6a2_52a7c3b4dcbe.slice/crio-a26f89784069255b23e6d92b51296afb3716749d97d4e8e04150d1d52ec765cd\": RecentStats: unable to find data in memory cache]"
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.479567 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c34ee0aa-60c1-4dce-8a22-5415eb6da004" (UID: "c34ee0aa-60c1-4dce-8a22-5415eb6da004"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.481589 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-config" (OuterVolumeSpecName: "config") pod "c34ee0aa-60c1-4dce-8a22-5415eb6da004" (UID: "c34ee0aa-60c1-4dce-8a22-5415eb6da004"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.482455 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-config\") pod \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\" (UID: \"c34ee0aa-60c1-4dce-8a22-5415eb6da004\") "
Oct 08 07:34:23 crc kubenswrapper[4693]: W1008 07:34:23.482568 4693 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/c34ee0aa-60c1-4dce-8a22-5415eb6da004/volumes/kubernetes.io~configmap/config
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.482586 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-config" (OuterVolumeSpecName: "config") pod "c34ee0aa-60c1-4dce-8a22-5415eb6da004" (UID: "c34ee0aa-60c1-4dce-8a22-5415eb6da004"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.482872 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.482887 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.482920 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-config\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.482931 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmkrc\" (UniqueName: \"kubernetes.io/projected/c34ee0aa-60c1-4dce-8a22-5415eb6da004-kube-api-access-dmkrc\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.482940 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.489790 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.489849 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.508724 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c34ee0aa-60c1-4dce-8a22-5415eb6da004" (UID: "c34ee0aa-60c1-4dce-8a22-5415eb6da004"). InnerVolumeSpecName "dns-swift-storage-0".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.521502 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.583901 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-scripts\") pod \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.584020 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-logs\") pod \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.584075 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-horizon-secret-key\") pod \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.584469 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-logs" (OuterVolumeSpecName: "logs") pod "a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" (UID: "a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.584599 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z56f8\" (UniqueName: \"kubernetes.io/projected/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-kube-api-access-z56f8\") pod \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.584622 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-config-data\") pod \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\" (UID: \"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb\") " Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.585076 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.585089 4693 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c34ee0aa-60c1-4dce-8a22-5415eb6da004-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.588151 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" (UID: "a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.588311 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-kube-api-access-z56f8" (OuterVolumeSpecName: "kube-api-access-z56f8") pod "a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" (UID: "a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb"). InnerVolumeSpecName "kube-api-access-z56f8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.610791 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-scripts" (OuterVolumeSpecName: "scripts") pod "a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" (UID: "a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.612663 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-config-data" (OuterVolumeSpecName: "config-data") pod "a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" (UID: "a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.686565 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.686598 4693 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.686611 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z56f8\" (UniqueName: \"kubernetes.io/projected/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-kube-api-access-z56f8\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.686620 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:23 crc kubenswrapper[4693]: I1008 07:34:23.981895 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.015273 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-77bfdd5769-m42ll" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.025124 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.082904 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7fd7c44d-xwrpr"] Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.084111 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7fd7c44d-xwrpr" podUID="3a935819-12b0-495b-944b-d74e091f176c" containerName="neutron-api" containerID="cri-o://94146f4bfca7747b4b942bfe8395949b5670d17a4eec6657e0f6afb104e9df10" gracePeriod=30 Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.084278 4693 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7fd7c44d-xwrpr" podUID="3a935819-12b0-495b-944b-d74e091f176c" containerName="neutron-httpd" containerID="cri-o://75e0792a492c6155b18866c1c8b8c7919f159e31e7a4b2391a17fe20ee1f2010" gracePeriod=30 Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.133710 4693 generic.go:334] "Generic (PLEG): container finished" podID="a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" containerID="81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba" exitCode=137 Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.133739 4693 generic.go:334] "Generic (PLEG): container finished" podID="a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" containerID="f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245" exitCode=137 Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.133778 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-748d7c6795-mlmk2" event={"ID":"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb","Type":"ContainerDied","Data":"81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba"} Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.133804 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-748d7c6795-mlmk2" event={"ID":"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb","Type":"ContainerDied","Data":"f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245"} Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.133827 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-748d7c6795-mlmk2" event={"ID":"a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb","Type":"ContainerDied","Data":"70773bc51e9f6375f63f23da545a03b434516f5462e6edbd42b02df42de7578a"} Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.133838 4693 scope.go:117] "RemoveContainer" containerID="81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.135388 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-748d7c6795-mlmk2" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.135491 4693 generic.go:334] "Generic (PLEG): container finished" podID="a5fbb923-ed64-4259-8d7c-9d19b190c785" containerID="0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11" exitCode=0 Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.135525 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a5fbb923-ed64-4259-8d7c-9d19b190c785","Type":"ContainerDied","Data":"0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11"} Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.141409 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-qkfgb" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.170849 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-748d7c6795-mlmk2"] Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.190476 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-748d7c6795-mlmk2"] Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.197481 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-qkfgb"] Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.209410 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-qkfgb"] Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.325396 4693 scope.go:117] "RemoveContainer" containerID="f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.375085 4693 scope.go:117] "RemoveContainer" containerID="81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba" Oct 08 07:34:24 crc kubenswrapper[4693]: E1008 07:34:24.375518 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba\": container with ID starting with 81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba not found: ID does not exist" containerID="81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.375547 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba"} err="failed to get container status \"81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba\": rpc error: code = NotFound desc = could not find container \"81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba\": container with ID starting with 81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba not found: ID does not exist" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.375567 4693 scope.go:117] "RemoveContainer" containerID="f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245" Oct 08 07:34:24 crc kubenswrapper[4693]: E1008 07:34:24.375826 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245\": container with ID starting with f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245 not found: ID does not exist" containerID="f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.375850 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245"} err="failed to get container status \"f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245\": rpc error: code = NotFound desc = could not find container \"f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245\": container with ID starting with f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245 not found: ID does not exist" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.375864 4693 scope.go:117] "RemoveContainer" containerID="81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba" Oct 
08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.376084 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba"} err="failed to get container status \"81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba\": rpc error: code = NotFound desc = could not find container \"81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba\": container with ID starting with 81a08bae25ca456c6009cc8ab6e277958093ac319c70f3d93a88bb65ee3c6dba not found: ID does not exist" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.376106 4693 scope.go:117] "RemoveContainer" containerID="f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245" Oct 08 07:34:24 crc kubenswrapper[4693]: I1008 07:34:24.376318 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245"} err="failed to get container status \"f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245\": rpc error: code = NotFound desc = could not find container \"f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245\": container with ID starting with f1d657d07db54d7a93d42aeab030cd9ba9cfb1471a08290e6450c04c17c5e245 not found: ID does not exist" Oct 08 07:34:25 crc kubenswrapper[4693]: I1008 07:34:25.144019 4693 generic.go:334] "Generic (PLEG): container finished" podID="3a935819-12b0-495b-944b-d74e091f176c" containerID="75e0792a492c6155b18866c1c8b8c7919f159e31e7a4b2391a17fe20ee1f2010" exitCode=0 Oct 08 07:34:25 crc kubenswrapper[4693]: I1008 07:34:25.144406 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fd7c44d-xwrpr" event={"ID":"3a935819-12b0-495b-944b-d74e091f176c","Type":"ContainerDied","Data":"75e0792a492c6155b18866c1c8b8c7919f159e31e7a4b2391a17fe20ee1f2010"} Oct 08 07:34:25 crc kubenswrapper[4693]: I1008 07:34:25.377517 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" path="/var/lib/kubelet/pods/a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb/volumes" Oct 08 07:34:25 crc kubenswrapper[4693]: I1008 07:34:25.378354 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c34ee0aa-60c1-4dce-8a22-5415eb6da004" path="/var/lib/kubelet/pods/c34ee0aa-60c1-4dce-8a22-5415eb6da004/volumes" Oct 08 07:34:25 crc kubenswrapper[4693]: I1008 07:34:25.660005 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:34:25 crc kubenswrapper[4693]: I1008 07:34:25.798600 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-786b4cdb4-z6p8n" Oct 08 07:34:25 crc kubenswrapper[4693]: I1008 07:34:25.859148 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-647ccf6b96-zrz9s"] Oct 08 07:34:26 crc kubenswrapper[4693]: I1008 07:34:26.155521 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-647ccf6b96-zrz9s" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerName="horizon-log" containerID="cri-o://fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837" gracePeriod=30 Oct 08 07:34:26 crc kubenswrapper[4693]: I1008 07:34:26.155589 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-647ccf6b96-zrz9s" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerName="horizon" 
containerID="cri-o://1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101" gracePeriod=30 Oct 08 07:34:26 crc kubenswrapper[4693]: I1008 07:34:26.955152 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.069016 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a5fbb923-ed64-4259-8d7c-9d19b190c785-etc-machine-id\") pod \"a5fbb923-ed64-4259-8d7c-9d19b190c785\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.069439 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-config-data\") pod \"a5fbb923-ed64-4259-8d7c-9d19b190c785\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.069459 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-config-data-custom\") pod \"a5fbb923-ed64-4259-8d7c-9d19b190c785\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.069487 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqm5z\" (UniqueName: \"kubernetes.io/projected/a5fbb923-ed64-4259-8d7c-9d19b190c785-kube-api-access-hqm5z\") pod \"a5fbb923-ed64-4259-8d7c-9d19b190c785\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.069507 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-combined-ca-bundle\") pod \"a5fbb923-ed64-4259-8d7c-9d19b190c785\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.069557 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-scripts\") pod \"a5fbb923-ed64-4259-8d7c-9d19b190c785\" (UID: \"a5fbb923-ed64-4259-8d7c-9d19b190c785\") " Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.070779 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a5fbb923-ed64-4259-8d7c-9d19b190c785-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a5fbb923-ed64-4259-8d7c-9d19b190c785" (UID: "a5fbb923-ed64-4259-8d7c-9d19b190c785"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.075471 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5fbb923-ed64-4259-8d7c-9d19b190c785-kube-api-access-hqm5z" (OuterVolumeSpecName: "kube-api-access-hqm5z") pod "a5fbb923-ed64-4259-8d7c-9d19b190c785" (UID: "a5fbb923-ed64-4259-8d7c-9d19b190c785"). InnerVolumeSpecName "kube-api-access-hqm5z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.075524 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-scripts" (OuterVolumeSpecName: "scripts") pod "a5fbb923-ed64-4259-8d7c-9d19b190c785" (UID: "a5fbb923-ed64-4259-8d7c-9d19b190c785"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.083898 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a5fbb923-ed64-4259-8d7c-9d19b190c785" (UID: "a5fbb923-ed64-4259-8d7c-9d19b190c785"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.122862 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5fbb923-ed64-4259-8d7c-9d19b190c785" (UID: "a5fbb923-ed64-4259-8d7c-9d19b190c785"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.167297 4693 generic.go:334] "Generic (PLEG): container finished" podID="a5fbb923-ed64-4259-8d7c-9d19b190c785" containerID="6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f" exitCode=0 Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.167350 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a5fbb923-ed64-4259-8d7c-9d19b190c785","Type":"ContainerDied","Data":"6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f"} Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.167361 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.167378 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a5fbb923-ed64-4259-8d7c-9d19b190c785","Type":"ContainerDied","Data":"d48c4dc770abba6db43ad7e0f125858a5854b31511abbb35323183c47301c8fd"} Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.167400 4693 scope.go:117] "RemoveContainer" containerID="0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.172158 4693 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.172192 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqm5z\" (UniqueName: \"kubernetes.io/projected/a5fbb923-ed64-4259-8d7c-9d19b190c785-kube-api-access-hqm5z\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.172206 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.172240 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.172252 4693 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a5fbb923-ed64-4259-8d7c-9d19b190c785-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.177196 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-config-data" (OuterVolumeSpecName: "config-data") pod "a5fbb923-ed64-4259-8d7c-9d19b190c785" (UID: "a5fbb923-ed64-4259-8d7c-9d19b190c785"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.224435 4693 scope.go:117] "RemoveContainer" containerID="6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.242367 4693 scope.go:117] "RemoveContainer" containerID="0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11" Oct 08 07:34:27 crc kubenswrapper[4693]: E1008 07:34:27.242884 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11\": container with ID starting with 0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11 not found: ID does not exist" containerID="0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.242927 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11"} err="failed to get container status \"0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11\": rpc error: code = NotFound desc = could not find container \"0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11\": container with ID starting with 0e3dce039fc6cd8d2c72e2996b77dc3c929665046e5e5051955506101f934f11 not found: ID does not exist" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.242954 4693 scope.go:117] "RemoveContainer" containerID="6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f" Oct 08 07:34:27 crc kubenswrapper[4693]: E1008 07:34:27.243265 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f\": container with ID starting with 6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f not found: ID does not exist" containerID="6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.243318 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f"} err="failed to get container status \"6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f\": rpc error: code = NotFound desc = could not find container \"6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f\": container with ID starting with 6dc75089e409c7dc0e1171bd9a0b0725c527e230b65d82b04805d9cf65fb962f not found: ID does not exist" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.274714 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5fbb923-ed64-4259-8d7c-9d19b190c785-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.491589 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.500235 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.523142 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 08 07:34:27 crc kubenswrapper[4693]: E1008 07:34:27.523756 4693 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a5fbb923-ed64-4259-8d7c-9d19b190c785" containerName="probe" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.523847 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5fbb923-ed64-4259-8d7c-9d19b190c785" containerName="probe" Oct 08 07:34:27 crc kubenswrapper[4693]: E1008 07:34:27.523928 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5fbb923-ed64-4259-8d7c-9d19b190c785" containerName="cinder-scheduler" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.523956 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5fbb923-ed64-4259-8d7c-9d19b190c785" containerName="cinder-scheduler" Oct 08 07:34:27 crc kubenswrapper[4693]: E1008 07:34:27.523995 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9f4f1ac-3f4d-470e-95ee-567268c5ad43" containerName="barbican-api-log" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524013 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9f4f1ac-3f4d-470e-95ee-567268c5ad43" containerName="barbican-api-log" Oct 08 07:34:27 crc kubenswrapper[4693]: E1008 07:34:27.524046 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c34ee0aa-60c1-4dce-8a22-5415eb6da004" containerName="init" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524062 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="c34ee0aa-60c1-4dce-8a22-5415eb6da004" containerName="init" Oct 08 07:34:27 crc kubenswrapper[4693]: E1008 07:34:27.524095 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9f4f1ac-3f4d-470e-95ee-567268c5ad43" containerName="barbican-api" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524112 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9f4f1ac-3f4d-470e-95ee-567268c5ad43" containerName="barbican-api" Oct 08 07:34:27 crc kubenswrapper[4693]: E1008 07:34:27.524137 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" containerName="horizon" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524154 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" containerName="horizon" Oct 08 07:34:27 crc kubenswrapper[4693]: E1008 07:34:27.524186 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c34ee0aa-60c1-4dce-8a22-5415eb6da004" containerName="dnsmasq-dns" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524204 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="c34ee0aa-60c1-4dce-8a22-5415eb6da004" containerName="dnsmasq-dns" Oct 08 07:34:27 crc kubenswrapper[4693]: E1008 07:34:27.524251 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" containerName="horizon-log" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524270 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" containerName="horizon-log" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524607 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9f4f1ac-3f4d-470e-95ee-567268c5ad43" containerName="barbican-api-log" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524629 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" containerName="horizon" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524644 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="c34ee0aa-60c1-4dce-8a22-5415eb6da004" 
containerName="dnsmasq-dns" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524657 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0794a6e-f5fe-4a0a-ae43-57e9bae8dbdb" containerName="horizon-log" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524679 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5fbb923-ed64-4259-8d7c-9d19b190c785" containerName="probe" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524691 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5fbb923-ed64-4259-8d7c-9d19b190c785" containerName="cinder-scheduler" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.524708 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9f4f1ac-3f4d-470e-95ee-567268c5ad43" containerName="barbican-api" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.527403 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.533987 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.626494 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b12dbfa-195c-43ea-ae2b-267a8733add4-scripts\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.626552 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b12dbfa-195c-43ea-ae2b-267a8733add4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.626571 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2f79z\" (UniqueName: \"kubernetes.io/projected/3b12dbfa-195c-43ea-ae2b-267a8733add4-kube-api-access-2f79z\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.626607 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b12dbfa-195c-43ea-ae2b-267a8733add4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.626638 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b12dbfa-195c-43ea-ae2b-267a8733add4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.626691 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b12dbfa-195c-43ea-ae2b-267a8733add4-config-data\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 
07:34:27.694107 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.729932 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b12dbfa-195c-43ea-ae2b-267a8733add4-config-data\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.730086 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b12dbfa-195c-43ea-ae2b-267a8733add4-scripts\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.730119 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b12dbfa-195c-43ea-ae2b-267a8733add4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.730163 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2f79z\" (UniqueName: \"kubernetes.io/projected/3b12dbfa-195c-43ea-ae2b-267a8733add4-kube-api-access-2f79z\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.730200 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b12dbfa-195c-43ea-ae2b-267a8733add4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.730233 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b12dbfa-195c-43ea-ae2b-267a8733add4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.737504 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b12dbfa-195c-43ea-ae2b-267a8733add4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.739198 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b12dbfa-195c-43ea-ae2b-267a8733add4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.739324 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b12dbfa-195c-43ea-ae2b-267a8733add4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.752665 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/3b12dbfa-195c-43ea-ae2b-267a8733add4-scripts\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.754076 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b12dbfa-195c-43ea-ae2b-267a8733add4-config-data\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.755716 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2f79z\" (UniqueName: \"kubernetes.io/projected/3b12dbfa-195c-43ea-ae2b-267a8733add4-kube-api-access-2f79z\") pod \"cinder-scheduler-0\" (UID: \"3b12dbfa-195c-43ea-ae2b-267a8733add4\") " pod="openstack/cinder-scheduler-0" Oct 08 07:34:27 crc kubenswrapper[4693]: I1008 07:34:27.961777 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 08 07:34:28 crc kubenswrapper[4693]: I1008 07:34:28.420966 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 08 07:34:28 crc kubenswrapper[4693]: W1008 07:34:28.422697 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b12dbfa_195c_43ea_ae2b_267a8733add4.slice/crio-13ae42bbd460ef012dc3120d2467e487618eefe4bdaff4e7f2c673b2064e422f WatchSource:0}: Error finding container 13ae42bbd460ef012dc3120d2467e487618eefe4bdaff4e7f2c673b2064e422f: Status 404 returned error can't find the container with id 13ae42bbd460ef012dc3120d2467e487618eefe4bdaff4e7f2c673b2064e422f Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.203346 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3b12dbfa-195c-43ea-ae2b-267a8733add4","Type":"ContainerStarted","Data":"d86aad00ec72bc8cd3cb3983ecbae46573ad6e2d1e314602db3ea8da8f8957bf"} Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.203760 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3b12dbfa-195c-43ea-ae2b-267a8733add4","Type":"ContainerStarted","Data":"13ae42bbd460ef012dc3120d2467e487618eefe4bdaff4e7f2c673b2064e422f"} Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.206322 4693 generic.go:334] "Generic (PLEG): container finished" podID="3a935819-12b0-495b-944b-d74e091f176c" containerID="94146f4bfca7747b4b942bfe8395949b5670d17a4eec6657e0f6afb104e9df10" exitCode=0 Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.206365 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fd7c44d-xwrpr" event={"ID":"3a935819-12b0-495b-944b-d74e091f176c","Type":"ContainerDied","Data":"94146f4bfca7747b4b942bfe8395949b5670d17a4eec6657e0f6afb104e9df10"} Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.379622 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5fbb923-ed64-4259-8d7c-9d19b190c785" path="/var/lib/kubelet/pods/a5fbb923-ed64-4259-8d7c-9d19b190c785/volumes" Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.493371 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.664807 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-config\") pod \"3a935819-12b0-495b-944b-d74e091f176c\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.664949 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-httpd-config\") pod \"3a935819-12b0-495b-944b-d74e091f176c\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.665047 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-ovndb-tls-certs\") pod \"3a935819-12b0-495b-944b-d74e091f176c\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.665068 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjd2c\" (UniqueName: \"kubernetes.io/projected/3a935819-12b0-495b-944b-d74e091f176c-kube-api-access-sjd2c\") pod \"3a935819-12b0-495b-944b-d74e091f176c\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.665088 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-combined-ca-bundle\") pod \"3a935819-12b0-495b-944b-d74e091f176c\" (UID: \"3a935819-12b0-495b-944b-d74e091f176c\") " Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.670223 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a935819-12b0-495b-944b-d74e091f176c-kube-api-access-sjd2c" (OuterVolumeSpecName: "kube-api-access-sjd2c") pod "3a935819-12b0-495b-944b-d74e091f176c" (UID: "3a935819-12b0-495b-944b-d74e091f176c"). InnerVolumeSpecName "kube-api-access-sjd2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.685341 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "3a935819-12b0-495b-944b-d74e091f176c" (UID: "3a935819-12b0-495b-944b-d74e091f176c"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.732277 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a935819-12b0-495b-944b-d74e091f176c" (UID: "3a935819-12b0-495b-944b-d74e091f176c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.759861 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-config" (OuterVolumeSpecName: "config") pod "3a935819-12b0-495b-944b-d74e091f176c" (UID: "3a935819-12b0-495b-944b-d74e091f176c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.767437 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjd2c\" (UniqueName: \"kubernetes.io/projected/3a935819-12b0-495b-944b-d74e091f176c-kube-api-access-sjd2c\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.767483 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.767501 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.767520 4693 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.777806 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "3a935819-12b0-495b-944b-d74e091f176c" (UID: "3a935819-12b0-495b-944b-d74e091f176c"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:29 crc kubenswrapper[4693]: I1008 07:34:29.869432 4693 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a935819-12b0-495b-944b-d74e091f176c-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:30 crc kubenswrapper[4693]: I1008 07:34:30.168554 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 08 07:34:30 crc kubenswrapper[4693]: I1008 07:34:30.253171 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3b12dbfa-195c-43ea-ae2b-267a8733add4","Type":"ContainerStarted","Data":"71cd424b4198cc7b1e1698396bd4e960d624b189860ec99b4da92c9c72965572"} Oct 08 07:34:30 crc kubenswrapper[4693]: I1008 07:34:30.260415 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fd7c44d-xwrpr" event={"ID":"3a935819-12b0-495b-944b-d74e091f176c","Type":"ContainerDied","Data":"a2f09f509a1f529623ea01907dc482912f637f8e3da1baa23a33c51cf1c7677a"} Oct 08 07:34:30 crc kubenswrapper[4693]: I1008 07:34:30.260479 4693 scope.go:117] "RemoveContainer" containerID="75e0792a492c6155b18866c1c8b8c7919f159e31e7a4b2391a17fe20ee1f2010" Oct 08 07:34:30 crc kubenswrapper[4693]: I1008 07:34:30.260593 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7fd7c44d-xwrpr" Oct 08 07:34:30 crc kubenswrapper[4693]: I1008 07:34:30.265594 4693 generic.go:334] "Generic (PLEG): container finished" podID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerID="1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101" exitCode=0 Oct 08 07:34:30 crc kubenswrapper[4693]: I1008 07:34:30.265654 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-647ccf6b96-zrz9s" event={"ID":"4c13d244-5d68-4fdc-834e-90409425f7f4","Type":"ContainerDied","Data":"1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101"} Oct 08 07:34:30 crc kubenswrapper[4693]: I1008 07:34:30.310404 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.310381247 podStartE2EDuration="3.310381247s" podCreationTimestamp="2025-10-08 07:34:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:34:30.290353033 +0000 UTC m=+1055.661317998" watchObservedRunningTime="2025-10-08 07:34:30.310381247 +0000 UTC m=+1055.681346182" Oct 08 07:34:30 crc kubenswrapper[4693]: I1008 07:34:30.311478 4693 scope.go:117] "RemoveContainer" containerID="94146f4bfca7747b4b942bfe8395949b5670d17a4eec6657e0f6afb104e9df10" Oct 08 07:34:30 crc kubenswrapper[4693]: I1008 07:34:30.321438 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7fd7c44d-xwrpr"] Oct 08 07:34:30 crc kubenswrapper[4693]: I1008 07:34:30.330955 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7fd7c44d-xwrpr"] Oct 08 07:34:31 crc kubenswrapper[4693]: I1008 07:34:31.380621 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a935819-12b0-495b-944b-d74e091f176c" path="/var/lib/kubelet/pods/3a935819-12b0-495b-944b-d74e091f176c/volumes" Oct 08 07:34:31 crc kubenswrapper[4693]: I1008 07:34:31.719182 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-647ccf6b96-zrz9s" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Oct 08 07:34:32 crc kubenswrapper[4693]: I1008 07:34:32.439851 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-84d7c4f8cb-75jz5" Oct 08 07:34:32 crc kubenswrapper[4693]: I1008 07:34:32.470099 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-84d7c4f8cb-75jz5" Oct 08 07:34:32 crc kubenswrapper[4693]: I1008 07:34:32.962898 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 08 07:34:33 crc kubenswrapper[4693]: I1008 07:34:33.153563 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-5bbc7cbf94-5tkqs" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.673303 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 08 07:34:35 crc kubenswrapper[4693]: E1008 07:34:35.674109 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a935819-12b0-495b-944b-d74e091f176c" containerName="neutron-httpd" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.674121 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a935819-12b0-495b-944b-d74e091f176c" 
containerName="neutron-httpd" Oct 08 07:34:35 crc kubenswrapper[4693]: E1008 07:34:35.674141 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a935819-12b0-495b-944b-d74e091f176c" containerName="neutron-api" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.674148 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a935819-12b0-495b-944b-d74e091f176c" containerName="neutron-api" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.674316 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a935819-12b0-495b-944b-d74e091f176c" containerName="neutron-api" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.674330 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a935819-12b0-495b-944b-d74e091f176c" containerName="neutron-httpd" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.674982 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.678742 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.678750 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.679584 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-ntdfn" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.683555 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.798956 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/de8f5998-6e3d-4695-affe-f3afab2d2528-openstack-config\") pod \"openstackclient\" (UID: \"de8f5998-6e3d-4695-affe-f3afab2d2528\") " pod="openstack/openstackclient" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.799082 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8f5998-6e3d-4695-affe-f3afab2d2528-combined-ca-bundle\") pod \"openstackclient\" (UID: \"de8f5998-6e3d-4695-affe-f3afab2d2528\") " pod="openstack/openstackclient" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.799147 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qshh\" (UniqueName: \"kubernetes.io/projected/de8f5998-6e3d-4695-affe-f3afab2d2528-kube-api-access-5qshh\") pod \"openstackclient\" (UID: \"de8f5998-6e3d-4695-affe-f3afab2d2528\") " pod="openstack/openstackclient" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.799208 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/de8f5998-6e3d-4695-affe-f3afab2d2528-openstack-config-secret\") pod \"openstackclient\" (UID: \"de8f5998-6e3d-4695-affe-f3afab2d2528\") " pod="openstack/openstackclient" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.900534 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8f5998-6e3d-4695-affe-f3afab2d2528-combined-ca-bundle\") pod \"openstackclient\" (UID: 
\"de8f5998-6e3d-4695-affe-f3afab2d2528\") " pod="openstack/openstackclient" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.900579 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qshh\" (UniqueName: \"kubernetes.io/projected/de8f5998-6e3d-4695-affe-f3afab2d2528-kube-api-access-5qshh\") pod \"openstackclient\" (UID: \"de8f5998-6e3d-4695-affe-f3afab2d2528\") " pod="openstack/openstackclient" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.900632 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/de8f5998-6e3d-4695-affe-f3afab2d2528-openstack-config-secret\") pod \"openstackclient\" (UID: \"de8f5998-6e3d-4695-affe-f3afab2d2528\") " pod="openstack/openstackclient" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.900656 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/de8f5998-6e3d-4695-affe-f3afab2d2528-openstack-config\") pod \"openstackclient\" (UID: \"de8f5998-6e3d-4695-affe-f3afab2d2528\") " pod="openstack/openstackclient" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.901464 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/de8f5998-6e3d-4695-affe-f3afab2d2528-openstack-config\") pod \"openstackclient\" (UID: \"de8f5998-6e3d-4695-affe-f3afab2d2528\") " pod="openstack/openstackclient" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.907312 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8f5998-6e3d-4695-affe-f3afab2d2528-combined-ca-bundle\") pod \"openstackclient\" (UID: \"de8f5998-6e3d-4695-affe-f3afab2d2528\") " pod="openstack/openstackclient" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.907358 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/de8f5998-6e3d-4695-affe-f3afab2d2528-openstack-config-secret\") pod \"openstackclient\" (UID: \"de8f5998-6e3d-4695-affe-f3afab2d2528\") " pod="openstack/openstackclient" Oct 08 07:34:35 crc kubenswrapper[4693]: I1008 07:34:35.917587 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qshh\" (UniqueName: \"kubernetes.io/projected/de8f5998-6e3d-4695-affe-f3afab2d2528-kube-api-access-5qshh\") pod \"openstackclient\" (UID: \"de8f5998-6e3d-4695-affe-f3afab2d2528\") " pod="openstack/openstackclient" Oct 08 07:34:36 crc kubenswrapper[4693]: I1008 07:34:36.004058 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 08 07:34:36 crc kubenswrapper[4693]: I1008 07:34:36.676026 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 08 07:34:36 crc kubenswrapper[4693]: E1008 07:34:36.744933 4693 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4d301e9_d078_4876_a6a2_52a7c3b4dcbe.slice/crio-a26f89784069255b23e6d92b51296afb3716749d97d4e8e04150d1d52ec765cd\": RecentStats: unable to find data in memory cache]" Oct 08 07:34:37 crc kubenswrapper[4693]: I1008 07:34:37.342605 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"de8f5998-6e3d-4695-affe-f3afab2d2528","Type":"ContainerStarted","Data":"fb399caa3d362b7942ba98fcc02f656216e6ba8f0582c97c89f5a50150eb20b8"} Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.174143 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.639317 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-6db965f4c9-sszpw"] Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.641430 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.643319 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6db965f4c9-sszpw"] Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.649189 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.649534 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.649686 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.757141 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgpdz\" (UniqueName: \"kubernetes.io/projected/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-kube-api-access-fgpdz\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.757228 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-run-httpd\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.757264 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-internal-tls-certs\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.757422 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-combined-ca-bundle\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.757904 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-etc-swift\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.757971 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-config-data\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.758049 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-public-tls-certs\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.758073 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-log-httpd\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.860483 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-combined-ca-bundle\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.861780 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-etc-swift\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.861919 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-config-data\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.862060 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-public-tls-certs\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.862163 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-log-httpd\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.862362 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgpdz\" (UniqueName: \"kubernetes.io/projected/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-kube-api-access-fgpdz\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.862550 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-run-httpd\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.862767 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-internal-tls-certs\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.863268 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-log-httpd\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.863476 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-run-httpd\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.868086 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-public-tls-certs\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.868272 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-config-data\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.868284 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-combined-ca-bundle\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.870475 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-internal-tls-certs\") pod 
\"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.873388 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-etc-swift\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.882267 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgpdz\" (UniqueName: \"kubernetes.io/projected/ba5fbd22-39c2-49ae-a74f-ee328cb29a02-kube-api-access-fgpdz\") pod \"swift-proxy-6db965f4c9-sszpw\" (UID: \"ba5fbd22-39c2-49ae-a74f-ee328cb29a02\") " pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:38 crc kubenswrapper[4693]: I1008 07:34:38.974622 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:39 crc kubenswrapper[4693]: I1008 07:34:39.572273 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6db965f4c9-sszpw"] Oct 08 07:34:40 crc kubenswrapper[4693]: I1008 07:34:40.377458 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6db965f4c9-sszpw" event={"ID":"ba5fbd22-39c2-49ae-a74f-ee328cb29a02","Type":"ContainerStarted","Data":"1ae173f574c1098fc67fc7f575e4108dedf05f84108a0e025e2e157214c3f5a3"} Oct 08 07:34:40 crc kubenswrapper[4693]: I1008 07:34:40.377790 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6db965f4c9-sszpw" event={"ID":"ba5fbd22-39c2-49ae-a74f-ee328cb29a02","Type":"ContainerStarted","Data":"4d7c3e1b1a0e3e6dccc32d5bf0bf42de5cc0b636731a00eeba4bee23216f853f"} Oct 08 07:34:40 crc kubenswrapper[4693]: I1008 07:34:40.377800 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6db965f4c9-sszpw" event={"ID":"ba5fbd22-39c2-49ae-a74f-ee328cb29a02","Type":"ContainerStarted","Data":"1a1b00aaf0ec42877092a89eaee8ca76676b851bd14ceb093adeaf81d14af5ff"} Oct 08 07:34:40 crc kubenswrapper[4693]: I1008 07:34:40.377833 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:40 crc kubenswrapper[4693]: I1008 07:34:40.400702 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-6db965f4c9-sszpw" podStartSLOduration=2.400685035 podStartE2EDuration="2.400685035s" podCreationTimestamp="2025-10-08 07:34:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:34:40.394439461 +0000 UTC m=+1065.765404396" watchObservedRunningTime="2025-10-08 07:34:40.400685035 +0000 UTC m=+1065.771649970" Oct 08 07:34:40 crc kubenswrapper[4693]: I1008 07:34:40.703005 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:34:40 crc kubenswrapper[4693]: I1008 07:34:40.703398 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="ceilometer-central-agent" containerID="cri-o://0565fdd057666d10ff2afcc33108730e90cdffcb42d4b3587c1825446d7217ee" gracePeriod=30 Oct 08 07:34:40 crc kubenswrapper[4693]: I1008 07:34:40.703516 4693 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openstack/ceilometer-0" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="sg-core" containerID="cri-o://194798bfd074ca73594f4425588ec43fbe83ca6bd980280ea5f17ce08815f1f3" gracePeriod=30 Oct 08 07:34:40 crc kubenswrapper[4693]: I1008 07:34:40.703671 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="proxy-httpd" containerID="cri-o://3cad5619d28f9561c8e36a556f4556b1321bae61166da952af5e48a01a7a8052" gracePeriod=30 Oct 08 07:34:40 crc kubenswrapper[4693]: I1008 07:34:40.703737 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="ceilometer-notification-agent" containerID="cri-o://57cb649cfa6790752324650a73dcf14894d6feeebacf6aed8488675ed3635dbf" gracePeriod=30 Oct 08 07:34:40 crc kubenswrapper[4693]: I1008 07:34:40.717208 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.166:3000/\": EOF" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.022305 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-8wxwp"] Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.023589 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8wxwp" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.038906 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-8wxwp"] Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.062705 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.062943 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="210e5669-78d1-47cb-8e46-e00c9764c2c7" containerName="glance-log" containerID="cri-o://20b8b158dd96d34de556d3d25ca53f3eccd970618c42b36bcdf3301c244bb29e" gracePeriod=30 Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.063316 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="210e5669-78d1-47cb-8e46-e00c9764c2c7" containerName="glance-httpd" containerID="cri-o://8b0653ccc1f9f1774b8e9b228531dd207898af52527e81451c392d21cdf86a27" gracePeriod=30 Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.101174 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46x7b\" (UniqueName: \"kubernetes.io/projected/82ff1555-b253-4980-afa9-ed42d9938ab7-kube-api-access-46x7b\") pod \"nova-api-db-create-8wxwp\" (UID: \"82ff1555-b253-4980-afa9-ed42d9938ab7\") " pod="openstack/nova-api-db-create-8wxwp" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.129865 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-v5zq8"] Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.131209 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-v5zq8" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.140051 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-v5zq8"] Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.203780 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfjzt\" (UniqueName: \"kubernetes.io/projected/77f1e46b-e47e-4b72-8e99-bffa5f5de30d-kube-api-access-kfjzt\") pod \"nova-cell0-db-create-v5zq8\" (UID: \"77f1e46b-e47e-4b72-8e99-bffa5f5de30d\") " pod="openstack/nova-cell0-db-create-v5zq8" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.203960 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46x7b\" (UniqueName: \"kubernetes.io/projected/82ff1555-b253-4980-afa9-ed42d9938ab7-kube-api-access-46x7b\") pod \"nova-api-db-create-8wxwp\" (UID: \"82ff1555-b253-4980-afa9-ed42d9938ab7\") " pod="openstack/nova-api-db-create-8wxwp" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.241019 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-mg9lm"] Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.242186 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-mg9lm" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.253526 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46x7b\" (UniqueName: \"kubernetes.io/projected/82ff1555-b253-4980-afa9-ed42d9938ab7-kube-api-access-46x7b\") pod \"nova-api-db-create-8wxwp\" (UID: \"82ff1555-b253-4980-afa9-ed42d9938ab7\") " pod="openstack/nova-api-db-create-8wxwp" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.264673 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-mg9lm"] Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.305877 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfjzt\" (UniqueName: \"kubernetes.io/projected/77f1e46b-e47e-4b72-8e99-bffa5f5de30d-kube-api-access-kfjzt\") pod \"nova-cell0-db-create-v5zq8\" (UID: \"77f1e46b-e47e-4b72-8e99-bffa5f5de30d\") " pod="openstack/nova-cell0-db-create-v5zq8" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.339645 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-8wxwp" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.347356 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfjzt\" (UniqueName: \"kubernetes.io/projected/77f1e46b-e47e-4b72-8e99-bffa5f5de30d-kube-api-access-kfjzt\") pod \"nova-cell0-db-create-v5zq8\" (UID: \"77f1e46b-e47e-4b72-8e99-bffa5f5de30d\") " pod="openstack/nova-cell0-db-create-v5zq8" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.409834 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hh56\" (UniqueName: \"kubernetes.io/projected/e9942b69-54ad-4bf9-980b-45255f2e31a5-kube-api-access-7hh56\") pod \"nova-cell1-db-create-mg9lm\" (UID: \"e9942b69-54ad-4bf9-980b-45255f2e31a5\") " pod="openstack/nova-cell1-db-create-mg9lm" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.412202 4693 generic.go:334] "Generic (PLEG): container finished" podID="210e5669-78d1-47cb-8e46-e00c9764c2c7" containerID="20b8b158dd96d34de556d3d25ca53f3eccd970618c42b36bcdf3301c244bb29e" exitCode=143 Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.412289 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"210e5669-78d1-47cb-8e46-e00c9764c2c7","Type":"ContainerDied","Data":"20b8b158dd96d34de556d3d25ca53f3eccd970618c42b36bcdf3301c244bb29e"} Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.420116 4693 generic.go:334] "Generic (PLEG): container finished" podID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerID="3cad5619d28f9561c8e36a556f4556b1321bae61166da952af5e48a01a7a8052" exitCode=0 Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.420344 4693 generic.go:334] "Generic (PLEG): container finished" podID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerID="194798bfd074ca73594f4425588ec43fbe83ca6bd980280ea5f17ce08815f1f3" exitCode=2 Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.420352 4693 generic.go:334] "Generic (PLEG): container finished" podID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerID="0565fdd057666d10ff2afcc33108730e90cdffcb42d4b3587c1825446d7217ee" exitCode=0 Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.420861 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35069ebb-6578-40d8-b9c6-5183fee2f040","Type":"ContainerDied","Data":"3cad5619d28f9561c8e36a556f4556b1321bae61166da952af5e48a01a7a8052"} Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.420921 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35069ebb-6578-40d8-b9c6-5183fee2f040","Type":"ContainerDied","Data":"194798bfd074ca73594f4425588ec43fbe83ca6bd980280ea5f17ce08815f1f3"} Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.420938 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.420950 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35069ebb-6578-40d8-b9c6-5183fee2f040","Type":"ContainerDied","Data":"0565fdd057666d10ff2afcc33108730e90cdffcb42d4b3587c1825446d7217ee"} Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.468976 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-v5zq8" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.511380 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hh56\" (UniqueName: \"kubernetes.io/projected/e9942b69-54ad-4bf9-980b-45255f2e31a5-kube-api-access-7hh56\") pod \"nova-cell1-db-create-mg9lm\" (UID: \"e9942b69-54ad-4bf9-980b-45255f2e31a5\") " pod="openstack/nova-cell1-db-create-mg9lm" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.542520 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hh56\" (UniqueName: \"kubernetes.io/projected/e9942b69-54ad-4bf9-980b-45255f2e31a5-kube-api-access-7hh56\") pod \"nova-cell1-db-create-mg9lm\" (UID: \"e9942b69-54ad-4bf9-980b-45255f2e31a5\") " pod="openstack/nova-cell1-db-create-mg9lm" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.609527 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-mg9lm" Oct 08 07:34:41 crc kubenswrapper[4693]: I1008 07:34:41.718157 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-647ccf6b96-zrz9s" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Oct 08 07:34:45 crc kubenswrapper[4693]: I1008 07:34:45.462485 4693 generic.go:334] "Generic (PLEG): container finished" podID="210e5669-78d1-47cb-8e46-e00c9764c2c7" containerID="8b0653ccc1f9f1774b8e9b228531dd207898af52527e81451c392d21cdf86a27" exitCode=0 Oct 08 07:34:45 crc kubenswrapper[4693]: I1008 07:34:45.462566 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"210e5669-78d1-47cb-8e46-e00c9764c2c7","Type":"ContainerDied","Data":"8b0653ccc1f9f1774b8e9b228531dd207898af52527e81451c392d21cdf86a27"} Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.499129 4693 generic.go:334] "Generic (PLEG): container finished" podID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerID="57cb649cfa6790752324650a73dcf14894d6feeebacf6aed8488675ed3635dbf" exitCode=0 Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.499212 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35069ebb-6578-40d8-b9c6-5183fee2f040","Type":"ContainerDied","Data":"57cb649cfa6790752324650a73dcf14894d6feeebacf6aed8488675ed3635dbf"} Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.501222 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.655592 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35069ebb-6578-40d8-b9c6-5183fee2f040-log-httpd\") pod \"35069ebb-6578-40d8-b9c6-5183fee2f040\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.655947 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-scripts\") pod \"35069ebb-6578-40d8-b9c6-5183fee2f040\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.656051 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bz6g5\" (UniqueName: \"kubernetes.io/projected/35069ebb-6578-40d8-b9c6-5183fee2f040-kube-api-access-bz6g5\") pod \"35069ebb-6578-40d8-b9c6-5183fee2f040\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.656077 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-config-data\") pod \"35069ebb-6578-40d8-b9c6-5183fee2f040\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.656221 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-sg-core-conf-yaml\") pod \"35069ebb-6578-40d8-b9c6-5183fee2f040\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.656279 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35069ebb-6578-40d8-b9c6-5183fee2f040-run-httpd\") pod \"35069ebb-6578-40d8-b9c6-5183fee2f040\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.656342 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-combined-ca-bundle\") pod \"35069ebb-6578-40d8-b9c6-5183fee2f040\" (UID: \"35069ebb-6578-40d8-b9c6-5183fee2f040\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.660514 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35069ebb-6578-40d8-b9c6-5183fee2f040-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "35069ebb-6578-40d8-b9c6-5183fee2f040" (UID: "35069ebb-6578-40d8-b9c6-5183fee2f040"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.660949 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35069ebb-6578-40d8-b9c6-5183fee2f040-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "35069ebb-6578-40d8-b9c6-5183fee2f040" (UID: "35069ebb-6578-40d8-b9c6-5183fee2f040"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.662551 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35069ebb-6578-40d8-b9c6-5183fee2f040-kube-api-access-bz6g5" (OuterVolumeSpecName: "kube-api-access-bz6g5") pod "35069ebb-6578-40d8-b9c6-5183fee2f040" (UID: "35069ebb-6578-40d8-b9c6-5183fee2f040"). InnerVolumeSpecName "kube-api-access-bz6g5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.666978 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-scripts" (OuterVolumeSpecName: "scripts") pod "35069ebb-6578-40d8-b9c6-5183fee2f040" (UID: "35069ebb-6578-40d8-b9c6-5183fee2f040"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.708843 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.761862 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bz6g5\" (UniqueName: \"kubernetes.io/projected/35069ebb-6578-40d8-b9c6-5183fee2f040-kube-api-access-bz6g5\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.761895 4693 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35069ebb-6578-40d8-b9c6-5183fee2f040-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.761906 4693 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35069ebb-6578-40d8-b9c6-5183fee2f040-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.761914 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.817699 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.825601 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" containerName="glance-log" containerID="cri-o://53d8c21458e02a80b867ac8e0e974d9aa93fa90b186a36379f7f5e985e9ad685" gracePeriod=30 Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.826036 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" containerName="glance-httpd" containerID="cri-o://faf4a2d70b7180f1e2b575fad7f563261e2b2f73d26cf1007620670089098961" gracePeriod=30 Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.848008 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "35069ebb-6578-40d8-b9c6-5183fee2f040" (UID: "35069ebb-6578-40d8-b9c6-5183fee2f040"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.863697 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/210e5669-78d1-47cb-8e46-e00c9764c2c7-httpd-run\") pod \"210e5669-78d1-47cb-8e46-e00c9764c2c7\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.863747 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"210e5669-78d1-47cb-8e46-e00c9764c2c7\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.863851 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtqw9\" (UniqueName: \"kubernetes.io/projected/210e5669-78d1-47cb-8e46-e00c9764c2c7-kube-api-access-jtqw9\") pod \"210e5669-78d1-47cb-8e46-e00c9764c2c7\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.863890 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-public-tls-certs\") pod \"210e5669-78d1-47cb-8e46-e00c9764c2c7\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.863933 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/210e5669-78d1-47cb-8e46-e00c9764c2c7-logs\") pod \"210e5669-78d1-47cb-8e46-e00c9764c2c7\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.864019 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-config-data\") pod \"210e5669-78d1-47cb-8e46-e00c9764c2c7\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.864037 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-combined-ca-bundle\") pod \"210e5669-78d1-47cb-8e46-e00c9764c2c7\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.864064 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-scripts\") pod \"210e5669-78d1-47cb-8e46-e00c9764c2c7\" (UID: \"210e5669-78d1-47cb-8e46-e00c9764c2c7\") " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.864634 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-mg9lm"] Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.864714 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/210e5669-78d1-47cb-8e46-e00c9764c2c7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "210e5669-78d1-47cb-8e46-e00c9764c2c7" (UID: "210e5669-78d1-47cb-8e46-e00c9764c2c7"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.865271 4693 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.865710 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/210e5669-78d1-47cb-8e46-e00c9764c2c7-logs" (OuterVolumeSpecName: "logs") pod "210e5669-78d1-47cb-8e46-e00c9764c2c7" (UID: "210e5669-78d1-47cb-8e46-e00c9764c2c7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: W1008 07:34:46.874982 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9942b69_54ad_4bf9_980b_45255f2e31a5.slice/crio-f241e4d87c1aba7ab988051ae8935d4ae0cb175058cd7218ea3634b3c8e7e094 WatchSource:0}: Error finding container f241e4d87c1aba7ab988051ae8935d4ae0cb175058cd7218ea3634b3c8e7e094: Status 404 returned error can't find the container with id f241e4d87c1aba7ab988051ae8935d4ae0cb175058cd7218ea3634b3c8e7e094 Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.890004 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210e5669-78d1-47cb-8e46-e00c9764c2c7-kube-api-access-jtqw9" (OuterVolumeSpecName: "kube-api-access-jtqw9") pod "210e5669-78d1-47cb-8e46-e00c9764c2c7" (UID: "210e5669-78d1-47cb-8e46-e00c9764c2c7"). InnerVolumeSpecName "kube-api-access-jtqw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.908936 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "210e5669-78d1-47cb-8e46-e00c9764c2c7" (UID: "210e5669-78d1-47cb-8e46-e00c9764c2c7"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.908952 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35069ebb-6578-40d8-b9c6-5183fee2f040" (UID: "35069ebb-6578-40d8-b9c6-5183fee2f040"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.909013 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-config-data" (OuterVolumeSpecName: "config-data") pod "35069ebb-6578-40d8-b9c6-5183fee2f040" (UID: "35069ebb-6578-40d8-b9c6-5183fee2f040"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.917028 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-scripts" (OuterVolumeSpecName: "scripts") pod "210e5669-78d1-47cb-8e46-e00c9764c2c7" (UID: "210e5669-78d1-47cb-8e46-e00c9764c2c7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.961669 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "210e5669-78d1-47cb-8e46-e00c9764c2c7" (UID: "210e5669-78d1-47cb-8e46-e00c9764c2c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.972205 4693 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/210e5669-78d1-47cb-8e46-e00c9764c2c7-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.972238 4693 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.972247 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.972257 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtqw9\" (UniqueName: \"kubernetes.io/projected/210e5669-78d1-47cb-8e46-e00c9764c2c7-kube-api-access-jtqw9\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.972266 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/210e5669-78d1-47cb-8e46-e00c9764c2c7-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.972275 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35069ebb-6578-40d8-b9c6-5183fee2f040-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.972282 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.972289 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.981640 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-8wxwp"] Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.990471 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-v5zq8"] Oct 08 07:34:46 crc kubenswrapper[4693]: I1008 07:34:46.994127 4693 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.009553 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-config-data" (OuterVolumeSpecName: "config-data") pod "210e5669-78d1-47cb-8e46-e00c9764c2c7" (UID: "210e5669-78d1-47cb-8e46-e00c9764c2c7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.057357 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "210e5669-78d1-47cb-8e46-e00c9764c2c7" (UID: "210e5669-78d1-47cb-8e46-e00c9764c2c7"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.073915 4693 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.073951 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210e5669-78d1-47cb-8e46-e00c9764c2c7-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.073963 4693 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:47 crc kubenswrapper[4693]: E1008 07:34:47.186354 4693 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4d301e9_d078_4876_a6a2_52a7c3b4dcbe.slice/crio-a26f89784069255b23e6d92b51296afb3716749d97d4e8e04150d1d52ec765cd\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde2f6c37_8e32_4276_bbe3_9f5c404a18b3.slice/crio-53d8c21458e02a80b867ac8e0e974d9aa93fa90b186a36379f7f5e985e9ad685.scope\": RecentStats: unable to find data in memory cache]" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.510905 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"de8f5998-6e3d-4695-affe-f3afab2d2528","Type":"ContainerStarted","Data":"db1bc53714ffb79286d088525a8970d5ac47a5a0e9d858f23652e63e2ac42544"} Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.512552 4693 generic.go:334] "Generic (PLEG): container finished" podID="77f1e46b-e47e-4b72-8e99-bffa5f5de30d" containerID="dedcbd4b017d22787241d60debd5f0f0e6fa2a7ef1c79674eea458c142ad9da1" exitCode=0 Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.512624 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-v5zq8" event={"ID":"77f1e46b-e47e-4b72-8e99-bffa5f5de30d","Type":"ContainerDied","Data":"dedcbd4b017d22787241d60debd5f0f0e6fa2a7ef1c79674eea458c142ad9da1"} Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.512646 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-v5zq8" event={"ID":"77f1e46b-e47e-4b72-8e99-bffa5f5de30d","Type":"ContainerStarted","Data":"5be7dfae8e1aa7c8594e694b08af308f7228e1ce19f8a34526346c028b0f1310"} Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.514320 4693 generic.go:334] "Generic (PLEG): container finished" podID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" containerID="53d8c21458e02a80b867ac8e0e974d9aa93fa90b186a36379f7f5e985e9ad685" exitCode=143 Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.514393 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"de2f6c37-8e32-4276-bbe3-9f5c404a18b3","Type":"ContainerDied","Data":"53d8c21458e02a80b867ac8e0e974d9aa93fa90b186a36379f7f5e985e9ad685"} Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.516395 4693 generic.go:334] "Generic (PLEG): container finished" podID="82ff1555-b253-4980-afa9-ed42d9938ab7" containerID="5e06bceb874defda4b53eb91fb0003f2718170972f149a667b6a3e2f1331fa09" exitCode=0 Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.516426 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8wxwp" event={"ID":"82ff1555-b253-4980-afa9-ed42d9938ab7","Type":"ContainerDied","Data":"5e06bceb874defda4b53eb91fb0003f2718170972f149a667b6a3e2f1331fa09"} Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.516454 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8wxwp" event={"ID":"82ff1555-b253-4980-afa9-ed42d9938ab7","Type":"ContainerStarted","Data":"14d5fc749371ebf430c1b357bb7c6a8e650406673ad3bce5f98d8396e42b5e8a"} Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.518092 4693 generic.go:334] "Generic (PLEG): container finished" podID="e9942b69-54ad-4bf9-980b-45255f2e31a5" containerID="bcec3ce3e35c44436bd8067fbfd3bb339af7a665c51d5667a115ae263a971667" exitCode=0 Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.518157 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-mg9lm" event={"ID":"e9942b69-54ad-4bf9-980b-45255f2e31a5","Type":"ContainerDied","Data":"bcec3ce3e35c44436bd8067fbfd3bb339af7a665c51d5667a115ae263a971667"} Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.518189 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-mg9lm" event={"ID":"e9942b69-54ad-4bf9-980b-45255f2e31a5","Type":"ContainerStarted","Data":"f241e4d87c1aba7ab988051ae8935d4ae0cb175058cd7218ea3634b3c8e7e094"} Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.520826 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35069ebb-6578-40d8-b9c6-5183fee2f040","Type":"ContainerDied","Data":"a2c9d4aa4782462b18cd7219525efe718ef1a7d01f2cae33997f231ff1943361"} Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.520832 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.520861 4693 scope.go:117] "RemoveContainer" containerID="3cad5619d28f9561c8e36a556f4556b1321bae61166da952af5e48a01a7a8052" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.524228 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"210e5669-78d1-47cb-8e46-e00c9764c2c7","Type":"ContainerDied","Data":"a65371312968a2f31bd9aa6d30c2f19e9fc3505841829c52c1c53ac32cb7c3c1"} Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.524304 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.531646 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.90295163 podStartE2EDuration="12.531626842s" podCreationTimestamp="2025-10-08 07:34:35 +0000 UTC" firstStartedPulling="2025-10-08 07:34:36.676011696 +0000 UTC m=+1062.046976631" lastFinishedPulling="2025-10-08 07:34:46.304686908 +0000 UTC m=+1071.675651843" observedRunningTime="2025-10-08 07:34:47.525964711 +0000 UTC m=+1072.896929666" watchObservedRunningTime="2025-10-08 07:34:47.531626842 +0000 UTC m=+1072.902591777" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.559319 4693 scope.go:117] "RemoveContainer" containerID="194798bfd074ca73594f4425588ec43fbe83ca6bd980280ea5f17ce08815f1f3" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.576919 4693 scope.go:117] "RemoveContainer" containerID="57cb649cfa6790752324650a73dcf14894d6feeebacf6aed8488675ed3635dbf" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.597422 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.607842 4693 scope.go:117] "RemoveContainer" containerID="0565fdd057666d10ff2afcc33108730e90cdffcb42d4b3587c1825446d7217ee" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.609513 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.623004 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.632745 4693 scope.go:117] "RemoveContainer" containerID="8b0653ccc1f9f1774b8e9b228531dd207898af52527e81451c392d21cdf86a27" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.637227 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.648320 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:34:47 crc kubenswrapper[4693]: E1008 07:34:47.648775 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="sg-core" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.648796 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="sg-core" Oct 08 07:34:47 crc kubenswrapper[4693]: E1008 07:34:47.648805 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="ceilometer-central-agent" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.648825 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="ceilometer-central-agent" Oct 08 07:34:47 crc kubenswrapper[4693]: E1008 07:34:47.648839 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="proxy-httpd" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.648846 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="proxy-httpd" Oct 08 07:34:47 crc kubenswrapper[4693]: E1008 07:34:47.648869 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" 
containerName="ceilometer-notification-agent" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.648875 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="ceilometer-notification-agent" Oct 08 07:34:47 crc kubenswrapper[4693]: E1008 07:34:47.648890 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="210e5669-78d1-47cb-8e46-e00c9764c2c7" containerName="glance-httpd" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.648897 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="210e5669-78d1-47cb-8e46-e00c9764c2c7" containerName="glance-httpd" Oct 08 07:34:47 crc kubenswrapper[4693]: E1008 07:34:47.648919 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="210e5669-78d1-47cb-8e46-e00c9764c2c7" containerName="glance-log" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.648924 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="210e5669-78d1-47cb-8e46-e00c9764c2c7" containerName="glance-log" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.649078 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="ceilometer-central-agent" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.649091 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="proxy-httpd" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.649100 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="210e5669-78d1-47cb-8e46-e00c9764c2c7" containerName="glance-httpd" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.649110 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="ceilometer-notification-agent" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.649124 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="210e5669-78d1-47cb-8e46-e00c9764c2c7" containerName="glance-log" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.649135 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" containerName="sg-core" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.653271 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.658733 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.659564 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.671427 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.677111 4693 scope.go:117] "RemoveContainer" containerID="20b8b158dd96d34de556d3d25ca53f3eccd970618c42b36bcdf3301c244bb29e" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.709368 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.711314 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.717209 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.725153 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.734073 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.796640 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06c30f6d-189d-4e3e-98f3-156a7784963c-scripts\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.796698 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/06c30f6d-189d-4e3e-98f3-156a7784963c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.796740 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06c30f6d-189d-4e3e-98f3-156a7784963c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.796903 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.796948 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06c30f6d-189d-4e3e-98f3-156a7784963c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.797009 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06c30f6d-189d-4e3e-98f3-156a7784963c-config-data\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.797054 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrx5k\" (UniqueName: \"kubernetes.io/projected/06c30f6d-189d-4e3e-98f3-156a7784963c-kube-api-access-mrx5k\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.797102 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/06c30f6d-189d-4e3e-98f3-156a7784963c-logs\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898332 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06c30f6d-189d-4e3e-98f3-156a7784963c-scripts\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898414 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/06c30f6d-189d-4e3e-98f3-156a7784963c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898461 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06c30f6d-189d-4e3e-98f3-156a7784963c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898512 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nk62\" (UniqueName: \"kubernetes.io/projected/5f58544a-80c3-438f-ba73-9b83d095b3b3-kube-api-access-4nk62\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898570 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898610 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06c30f6d-189d-4e3e-98f3-156a7784963c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898659 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06c30f6d-189d-4e3e-98f3-156a7784963c-config-data\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898691 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-config-data\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898736 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrx5k\" (UniqueName: \"kubernetes.io/projected/06c30f6d-189d-4e3e-98f3-156a7784963c-kube-api-access-mrx5k\") pod 
\"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898771 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06c30f6d-189d-4e3e-98f3-156a7784963c-logs\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898836 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898870 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-scripts\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898908 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.898953 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f58544a-80c3-438f-ba73-9b83d095b3b3-log-httpd\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.899028 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f58544a-80c3-438f-ba73-9b83d095b3b3-run-httpd\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.899506 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.899748 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06c30f6d-189d-4e3e-98f3-156a7784963c-logs\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.899791 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/06c30f6d-189d-4e3e-98f3-156a7784963c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 
07:34:47.911039 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06c30f6d-189d-4e3e-98f3-156a7784963c-scripts\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.911534 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06c30f6d-189d-4e3e-98f3-156a7784963c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.912509 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06c30f6d-189d-4e3e-98f3-156a7784963c-config-data\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.917507 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06c30f6d-189d-4e3e-98f3-156a7784963c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.924803 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrx5k\" (UniqueName: \"kubernetes.io/projected/06c30f6d-189d-4e3e-98f3-156a7784963c-kube-api-access-mrx5k\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.951760 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"06c30f6d-189d-4e3e-98f3-156a7784963c\") " pod="openstack/glance-default-external-api-0" Oct 08 07:34:47 crc kubenswrapper[4693]: I1008 07:34:47.998243 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.001174 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-config-data\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.001271 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.001297 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-scripts\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.001349 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.001382 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f58544a-80c3-438f-ba73-9b83d095b3b3-log-httpd\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.001462 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f58544a-80c3-438f-ba73-9b83d095b3b3-run-httpd\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.001628 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nk62\" (UniqueName: \"kubernetes.io/projected/5f58544a-80c3-438f-ba73-9b83d095b3b3-kube-api-access-4nk62\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.002243 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f58544a-80c3-438f-ba73-9b83d095b3b3-log-httpd\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.002585 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f58544a-80c3-438f-ba73-9b83d095b3b3-run-httpd\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.005150 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-config-data\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " 
pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.006975 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.007461 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.007896 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-scripts\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.020503 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nk62\" (UniqueName: \"kubernetes.io/projected/5f58544a-80c3-438f-ba73-9b83d095b3b3-kube-api-access-4nk62\") pod \"ceilometer-0\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.041174 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.595437 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:34:48 crc kubenswrapper[4693]: W1008 07:34:48.605640 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f58544a_80c3_438f_ba73_9b83d095b3b3.slice/crio-76c588659c94004b0346e9538401ccfb680799f61a4733ada86848e92ce42f4b WatchSource:0}: Error finding container 76c588659c94004b0346e9538401ccfb680799f61a4733ada86848e92ce42f4b: Status 404 returned error can't find the container with id 76c588659c94004b0346e9538401ccfb680799f61a4733ada86848e92ce42f4b Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.608197 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.984251 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:48 crc kubenswrapper[4693]: I1008 07:34:48.991315 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6db965f4c9-sszpw" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.002304 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8wxwp" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.007882 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-mg9lm" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.021409 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-v5zq8" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.125654 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46x7b\" (UniqueName: \"kubernetes.io/projected/82ff1555-b253-4980-afa9-ed42d9938ab7-kube-api-access-46x7b\") pod \"82ff1555-b253-4980-afa9-ed42d9938ab7\" (UID: \"82ff1555-b253-4980-afa9-ed42d9938ab7\") " Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.125725 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hh56\" (UniqueName: \"kubernetes.io/projected/e9942b69-54ad-4bf9-980b-45255f2e31a5-kube-api-access-7hh56\") pod \"e9942b69-54ad-4bf9-980b-45255f2e31a5\" (UID: \"e9942b69-54ad-4bf9-980b-45255f2e31a5\") " Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.125898 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfjzt\" (UniqueName: \"kubernetes.io/projected/77f1e46b-e47e-4b72-8e99-bffa5f5de30d-kube-api-access-kfjzt\") pod \"77f1e46b-e47e-4b72-8e99-bffa5f5de30d\" (UID: \"77f1e46b-e47e-4b72-8e99-bffa5f5de30d\") " Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.133944 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82ff1555-b253-4980-afa9-ed42d9938ab7-kube-api-access-46x7b" (OuterVolumeSpecName: "kube-api-access-46x7b") pod "82ff1555-b253-4980-afa9-ed42d9938ab7" (UID: "82ff1555-b253-4980-afa9-ed42d9938ab7"). InnerVolumeSpecName "kube-api-access-46x7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.133983 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9942b69-54ad-4bf9-980b-45255f2e31a5-kube-api-access-7hh56" (OuterVolumeSpecName: "kube-api-access-7hh56") pod "e9942b69-54ad-4bf9-980b-45255f2e31a5" (UID: "e9942b69-54ad-4bf9-980b-45255f2e31a5"). InnerVolumeSpecName "kube-api-access-7hh56". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.136981 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77f1e46b-e47e-4b72-8e99-bffa5f5de30d-kube-api-access-kfjzt" (OuterVolumeSpecName: "kube-api-access-kfjzt") pod "77f1e46b-e47e-4b72-8e99-bffa5f5de30d" (UID: "77f1e46b-e47e-4b72-8e99-bffa5f5de30d"). InnerVolumeSpecName "kube-api-access-kfjzt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.227513 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfjzt\" (UniqueName: \"kubernetes.io/projected/77f1e46b-e47e-4b72-8e99-bffa5f5de30d-kube-api-access-kfjzt\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.227536 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46x7b\" (UniqueName: \"kubernetes.io/projected/82ff1555-b253-4980-afa9-ed42d9938ab7-kube-api-access-46x7b\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.227546 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hh56\" (UniqueName: \"kubernetes.io/projected/e9942b69-54ad-4bf9-980b-45255f2e31a5-kube-api-access-7hh56\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.333601 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.372587 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210e5669-78d1-47cb-8e46-e00c9764c2c7" path="/var/lib/kubelet/pods/210e5669-78d1-47cb-8e46-e00c9764c2c7/volumes" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.373547 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35069ebb-6578-40d8-b9c6-5183fee2f040" path="/var/lib/kubelet/pods/35069ebb-6578-40d8-b9c6-5183fee2f040/volumes" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.555275 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8wxwp" event={"ID":"82ff1555-b253-4980-afa9-ed42d9938ab7","Type":"ContainerDied","Data":"14d5fc749371ebf430c1b357bb7c6a8e650406673ad3bce5f98d8396e42b5e8a"} Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.555329 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14d5fc749371ebf430c1b357bb7c6a8e650406673ad3bce5f98d8396e42b5e8a" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.555711 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8wxwp" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.558415 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f58544a-80c3-438f-ba73-9b83d095b3b3","Type":"ContainerStarted","Data":"709ab9aabcedb6747b30d52514a99ddf86bffd135eeeaf22b5581fece090e7b9"} Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.558620 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f58544a-80c3-438f-ba73-9b83d095b3b3","Type":"ContainerStarted","Data":"76c588659c94004b0346e9538401ccfb680799f61a4733ada86848e92ce42f4b"} Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.561546 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-mg9lm" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.561575 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-mg9lm" event={"ID":"e9942b69-54ad-4bf9-980b-45255f2e31a5","Type":"ContainerDied","Data":"f241e4d87c1aba7ab988051ae8935d4ae0cb175058cd7218ea3634b3c8e7e094"} Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.561988 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f241e4d87c1aba7ab988051ae8935d4ae0cb175058cd7218ea3634b3c8e7e094" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.565677 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"06c30f6d-189d-4e3e-98f3-156a7784963c","Type":"ContainerStarted","Data":"1bac17ceb6f4bd99bcbfdabc6c6da0d48950ac2eeb58c0d9a88e31afe03dc35e"} Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.565706 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"06c30f6d-189d-4e3e-98f3-156a7784963c","Type":"ContainerStarted","Data":"38e200ce97f37ad1c1d0f1670c53ba48e3d52d50b100f86b43404b63a6ffc906"} Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.568385 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-v5zq8" Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.568750 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-v5zq8" event={"ID":"77f1e46b-e47e-4b72-8e99-bffa5f5de30d","Type":"ContainerDied","Data":"5be7dfae8e1aa7c8594e694b08af308f7228e1ce19f8a34526346c028b0f1310"} Oct 08 07:34:49 crc kubenswrapper[4693]: I1008 07:34:49.568768 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5be7dfae8e1aa7c8594e694b08af308f7228e1ce19f8a34526346c028b0f1310" Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.464101 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.152:9292/healthcheck\": dial tcp 10.217.0.152:9292: connect: connection refused" Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.464191 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.152:9292/healthcheck\": dial tcp 10.217.0.152:9292: connect: connection refused" Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.579244 4693 generic.go:334] "Generic (PLEG): container finished" podID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" containerID="faf4a2d70b7180f1e2b575fad7f563261e2b2f73d26cf1007620670089098961" exitCode=0 Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.579286 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"de2f6c37-8e32-4276-bbe3-9f5c404a18b3","Type":"ContainerDied","Data":"faf4a2d70b7180f1e2b575fad7f563261e2b2f73d26cf1007620670089098961"} Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.581183 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"5f58544a-80c3-438f-ba73-9b83d095b3b3","Type":"ContainerStarted","Data":"dc6f87c7fb37695da0fdb4c4084e634fe8789a8f428290932111fb30cdc853f9"} Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.582432 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"06c30f6d-189d-4e3e-98f3-156a7784963c","Type":"ContainerStarted","Data":"0e9c7bcc71021ceb0387dc9cbfd91cca61549cabbdf1eb47162cf03ef4e0722a"} Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.611551 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.611535595 podStartE2EDuration="3.611535595s" podCreationTimestamp="2025-10-08 07:34:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:34:50.607342688 +0000 UTC m=+1075.978307623" watchObservedRunningTime="2025-10-08 07:34:50.611535595 +0000 UTC m=+1075.982500530" Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.813560 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.965658 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-internal-tls-certs\") pod \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.966035 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-scripts\") pod \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.966070 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-config-data\") pod \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.966163 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.966203 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ks6z\" (UniqueName: \"kubernetes.io/projected/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-kube-api-access-5ks6z\") pod \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.966220 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-combined-ca-bundle\") pod \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.966267 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-logs\") 
pod \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.966313 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-httpd-run\") pod \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\" (UID: \"de2f6c37-8e32-4276-bbe3-9f5c404a18b3\") " Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.966929 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "de2f6c37-8e32-4276-bbe3-9f5c404a18b3" (UID: "de2f6c37-8e32-4276-bbe3-9f5c404a18b3"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.970550 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "de2f6c37-8e32-4276-bbe3-9f5c404a18b3" (UID: "de2f6c37-8e32-4276-bbe3-9f5c404a18b3"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.970902 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-logs" (OuterVolumeSpecName: "logs") pod "de2f6c37-8e32-4276-bbe3-9f5c404a18b3" (UID: "de2f6c37-8e32-4276-bbe3-9f5c404a18b3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.971351 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-kube-api-access-5ks6z" (OuterVolumeSpecName: "kube-api-access-5ks6z") pod "de2f6c37-8e32-4276-bbe3-9f5c404a18b3" (UID: "de2f6c37-8e32-4276-bbe3-9f5c404a18b3"). InnerVolumeSpecName "kube-api-access-5ks6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.988883 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-scripts" (OuterVolumeSpecName: "scripts") pod "de2f6c37-8e32-4276-bbe3-9f5c404a18b3" (UID: "de2f6c37-8e32-4276-bbe3-9f5c404a18b3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:50 crc kubenswrapper[4693]: I1008 07:34:50.993132 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de2f6c37-8e32-4276-bbe3-9f5c404a18b3" (UID: "de2f6c37-8e32-4276-bbe3-9f5c404a18b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.025066 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "de2f6c37-8e32-4276-bbe3-9f5c404a18b3" (UID: "de2f6c37-8e32-4276-bbe3-9f5c404a18b3"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.028057 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-config-data" (OuterVolumeSpecName: "config-data") pod "de2f6c37-8e32-4276-bbe3-9f5c404a18b3" (UID: "de2f6c37-8e32-4276-bbe3-9f5c404a18b3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.068062 4693 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.068091 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ks6z\" (UniqueName: \"kubernetes.io/projected/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-kube-api-access-5ks6z\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.068101 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.068111 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.068131 4693 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.068141 4693 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.068152 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.068163 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de2f6c37-8e32-4276-bbe3-9f5c404a18b3-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.089759 4693 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.169718 4693 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.592396 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"de2f6c37-8e32-4276-bbe3-9f5c404a18b3","Type":"ContainerDied","Data":"8f63e789397696ce043caf6fe805789004d736bbfd8b47f4e1f34949eb4da4ac"} Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.592620 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.592686 4693 scope.go:117] "RemoveContainer" containerID="faf4a2d70b7180f1e2b575fad7f563261e2b2f73d26cf1007620670089098961" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.598108 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f58544a-80c3-438f-ba73-9b83d095b3b3","Type":"ContainerStarted","Data":"415b1473955b634edb41cad4e81485d03606c73826ceb376b76e67c33f0e7a15"} Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.620064 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.631623 4693 scope.go:117] "RemoveContainer" containerID="53d8c21458e02a80b867ac8e0e974d9aa93fa90b186a36379f7f5e985e9ad685" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.635825 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.648857 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:34:51 crc kubenswrapper[4693]: E1008 07:34:51.649260 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9942b69-54ad-4bf9-980b-45255f2e31a5" containerName="mariadb-database-create" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.649286 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9942b69-54ad-4bf9-980b-45255f2e31a5" containerName="mariadb-database-create" Oct 08 07:34:51 crc kubenswrapper[4693]: E1008 07:34:51.649304 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" containerName="glance-log" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.649313 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" containerName="glance-log" Oct 08 07:34:51 crc kubenswrapper[4693]: E1008 07:34:51.649352 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" containerName="glance-httpd" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.649360 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" containerName="glance-httpd" Oct 08 07:34:51 crc kubenswrapper[4693]: E1008 07:34:51.649383 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82ff1555-b253-4980-afa9-ed42d9938ab7" containerName="mariadb-database-create" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.649393 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="82ff1555-b253-4980-afa9-ed42d9938ab7" containerName="mariadb-database-create" Oct 08 07:34:51 crc kubenswrapper[4693]: E1008 07:34:51.649403 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77f1e46b-e47e-4b72-8e99-bffa5f5de30d" containerName="mariadb-database-create" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.649411 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="77f1e46b-e47e-4b72-8e99-bffa5f5de30d" containerName="mariadb-database-create" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.649609 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9942b69-54ad-4bf9-980b-45255f2e31a5" containerName="mariadb-database-create" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.649649 4693 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="77f1e46b-e47e-4b72-8e99-bffa5f5de30d" containerName="mariadb-database-create" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.649668 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="82ff1555-b253-4980-afa9-ed42d9938ab7" containerName="mariadb-database-create" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.649678 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" containerName="glance-httpd" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.649696 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" containerName="glance-log" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.651647 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.658598 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.659054 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.679530 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.719000 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-647ccf6b96-zrz9s" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.719094 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.781433 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.781476 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-logs\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.781496 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.781603 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.781643 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh5rr\" (UniqueName: \"kubernetes.io/projected/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-kube-api-access-gh5rr\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.781676 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.781733 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.781750 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.882932 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.882982 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh5rr\" (UniqueName: \"kubernetes.io/projected/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-kube-api-access-gh5rr\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.883010 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.883037 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.883054 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.883101 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.883118 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-logs\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.883133 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.884216 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.884398 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-logs\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.884575 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.890605 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.890684 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.891358 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: 
I1008 07:34:51.903872 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.907918 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh5rr\" (UniqueName: \"kubernetes.io/projected/d34c5891-320c-402e-9ee6-0f75ba7e2bbb-kube-api-access-gh5rr\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.916204 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d34c5891-320c-402e-9ee6-0f75ba7e2bbb\") " pod="openstack/glance-default-internal-api-0" Oct 08 07:34:51 crc kubenswrapper[4693]: I1008 07:34:51.978283 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 08 07:34:52 crc kubenswrapper[4693]: I1008 07:34:52.607902 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 08 07:34:52 crc kubenswrapper[4693]: I1008 07:34:52.624064 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f58544a-80c3-438f-ba73-9b83d095b3b3","Type":"ContainerStarted","Data":"f63b5dbaf1ad3054df0f25142d88f63f061899da4cb2f4d959c4797c62b97f9d"} Oct 08 07:34:52 crc kubenswrapper[4693]: I1008 07:34:52.624205 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="ceilometer-central-agent" containerID="cri-o://709ab9aabcedb6747b30d52514a99ddf86bffd135eeeaf22b5581fece090e7b9" gracePeriod=30 Oct 08 07:34:52 crc kubenswrapper[4693]: I1008 07:34:52.624480 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 08 07:34:52 crc kubenswrapper[4693]: I1008 07:34:52.624698 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="proxy-httpd" containerID="cri-o://f63b5dbaf1ad3054df0f25142d88f63f061899da4cb2f4d959c4797c62b97f9d" gracePeriod=30 Oct 08 07:34:52 crc kubenswrapper[4693]: I1008 07:34:52.624743 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="sg-core" containerID="cri-o://415b1473955b634edb41cad4e81485d03606c73826ceb376b76e67c33f0e7a15" gracePeriod=30 Oct 08 07:34:52 crc kubenswrapper[4693]: I1008 07:34:52.624775 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="ceilometer-notification-agent" containerID="cri-o://dc6f87c7fb37695da0fdb4c4084e634fe8789a8f428290932111fb30cdc853f9" gracePeriod=30 Oct 08 07:34:52 crc kubenswrapper[4693]: I1008 07:34:52.654394 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.065017772 podStartE2EDuration="5.654381009s" 
podCreationTimestamp="2025-10-08 07:34:47 +0000 UTC" firstStartedPulling="2025-10-08 07:34:48.669665299 +0000 UTC m=+1074.040630234" lastFinishedPulling="2025-10-08 07:34:52.259028536 +0000 UTC m=+1077.629993471" observedRunningTime="2025-10-08 07:34:52.650420487 +0000 UTC m=+1078.021385422" watchObservedRunningTime="2025-10-08 07:34:52.654381009 +0000 UTC m=+1078.025345944" Oct 08 07:34:53 crc kubenswrapper[4693]: I1008 07:34:53.376786 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de2f6c37-8e32-4276-bbe3-9f5c404a18b3" path="/var/lib/kubelet/pods/de2f6c37-8e32-4276-bbe3-9f5c404a18b3/volumes" Oct 08 07:34:53 crc kubenswrapper[4693]: I1008 07:34:53.496876 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:34:53 crc kubenswrapper[4693]: I1008 07:34:53.496956 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:34:53 crc kubenswrapper[4693]: I1008 07:34:53.642635 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d34c5891-320c-402e-9ee6-0f75ba7e2bbb","Type":"ContainerStarted","Data":"047186577d1228eed7d47b81fc4865bc8e49c7ae6dd00dcb0a6c0eaf93983f27"} Oct 08 07:34:53 crc kubenswrapper[4693]: I1008 07:34:53.642732 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d34c5891-320c-402e-9ee6-0f75ba7e2bbb","Type":"ContainerStarted","Data":"080e600e4573a2ec004ea09fd859b0f61530b0226e422895209a89321332dd08"} Oct 08 07:34:53 crc kubenswrapper[4693]: I1008 07:34:53.646648 4693 generic.go:334] "Generic (PLEG): container finished" podID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerID="415b1473955b634edb41cad4e81485d03606c73826ceb376b76e67c33f0e7a15" exitCode=2 Oct 08 07:34:53 crc kubenswrapper[4693]: I1008 07:34:53.646690 4693 generic.go:334] "Generic (PLEG): container finished" podID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerID="dc6f87c7fb37695da0fdb4c4084e634fe8789a8f428290932111fb30cdc853f9" exitCode=0 Oct 08 07:34:53 crc kubenswrapper[4693]: I1008 07:34:53.646697 4693 generic.go:334] "Generic (PLEG): container finished" podID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerID="709ab9aabcedb6747b30d52514a99ddf86bffd135eeeaf22b5581fece090e7b9" exitCode=0 Oct 08 07:34:53 crc kubenswrapper[4693]: I1008 07:34:53.646710 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f58544a-80c3-438f-ba73-9b83d095b3b3","Type":"ContainerDied","Data":"415b1473955b634edb41cad4e81485d03606c73826ceb376b76e67c33f0e7a15"} Oct 08 07:34:53 crc kubenswrapper[4693]: I1008 07:34:53.646730 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f58544a-80c3-438f-ba73-9b83d095b3b3","Type":"ContainerDied","Data":"dc6f87c7fb37695da0fdb4c4084e634fe8789a8f428290932111fb30cdc853f9"} Oct 08 07:34:53 crc kubenswrapper[4693]: I1008 07:34:53.646739 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"5f58544a-80c3-438f-ba73-9b83d095b3b3","Type":"ContainerDied","Data":"709ab9aabcedb6747b30d52514a99ddf86bffd135eeeaf22b5581fece090e7b9"} Oct 08 07:34:54 crc kubenswrapper[4693]: I1008 07:34:54.657425 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d34c5891-320c-402e-9ee6-0f75ba7e2bbb","Type":"ContainerStarted","Data":"cd1c8e684fb322e55c1e81c227af11fec8c3f107e8b139abff0c1713b43ef2ce"} Oct 08 07:34:54 crc kubenswrapper[4693]: I1008 07:34:54.678956 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.678941139 podStartE2EDuration="3.678941139s" podCreationTimestamp="2025-10-08 07:34:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:34:54.677115836 +0000 UTC m=+1080.048080781" watchObservedRunningTime="2025-10-08 07:34:54.678941139 +0000 UTC m=+1080.049906074" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.601748 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.680620 4693 generic.go:334] "Generic (PLEG): container finished" podID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerID="fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837" exitCode=137 Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.680685 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-647ccf6b96-zrz9s" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.680686 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-647ccf6b96-zrz9s" event={"ID":"4c13d244-5d68-4fdc-834e-90409425f7f4","Type":"ContainerDied","Data":"fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837"} Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.680902 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-647ccf6b96-zrz9s" event={"ID":"4c13d244-5d68-4fdc-834e-90409425f7f4","Type":"ContainerDied","Data":"58bc6c46396681ef02b318afaae45c284ed34c9123dd771ca58364d7c3a4722e"} Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.680978 4693 scope.go:117] "RemoveContainer" containerID="1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.702328 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kv7qs\" (UniqueName: \"kubernetes.io/projected/4c13d244-5d68-4fdc-834e-90409425f7f4-kube-api-access-kv7qs\") pod \"4c13d244-5d68-4fdc-834e-90409425f7f4\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.702456 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-horizon-secret-key\") pod \"4c13d244-5d68-4fdc-834e-90409425f7f4\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.702505 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-combined-ca-bundle\") pod \"4c13d244-5d68-4fdc-834e-90409425f7f4\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " Oct 08 
07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.702543 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c13d244-5d68-4fdc-834e-90409425f7f4-config-data\") pod \"4c13d244-5d68-4fdc-834e-90409425f7f4\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.702578 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c13d244-5d68-4fdc-834e-90409425f7f4-scripts\") pod \"4c13d244-5d68-4fdc-834e-90409425f7f4\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.702669 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c13d244-5d68-4fdc-834e-90409425f7f4-logs\") pod \"4c13d244-5d68-4fdc-834e-90409425f7f4\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.702805 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-horizon-tls-certs\") pod \"4c13d244-5d68-4fdc-834e-90409425f7f4\" (UID: \"4c13d244-5d68-4fdc-834e-90409425f7f4\") " Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.703556 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c13d244-5d68-4fdc-834e-90409425f7f4-logs" (OuterVolumeSpecName: "logs") pod "4c13d244-5d68-4fdc-834e-90409425f7f4" (UID: "4c13d244-5d68-4fdc-834e-90409425f7f4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.712053 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "4c13d244-5d68-4fdc-834e-90409425f7f4" (UID: "4c13d244-5d68-4fdc-834e-90409425f7f4"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.712225 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c13d244-5d68-4fdc-834e-90409425f7f4-kube-api-access-kv7qs" (OuterVolumeSpecName: "kube-api-access-kv7qs") pod "4c13d244-5d68-4fdc-834e-90409425f7f4" (UID: "4c13d244-5d68-4fdc-834e-90409425f7f4"). InnerVolumeSpecName "kube-api-access-kv7qs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.731318 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c13d244-5d68-4fdc-834e-90409425f7f4" (UID: "4c13d244-5d68-4fdc-834e-90409425f7f4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.741991 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c13d244-5d68-4fdc-834e-90409425f7f4-config-data" (OuterVolumeSpecName: "config-data") pod "4c13d244-5d68-4fdc-834e-90409425f7f4" (UID: "4c13d244-5d68-4fdc-834e-90409425f7f4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.749730 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c13d244-5d68-4fdc-834e-90409425f7f4-scripts" (OuterVolumeSpecName: "scripts") pod "4c13d244-5d68-4fdc-834e-90409425f7f4" (UID: "4c13d244-5d68-4fdc-834e-90409425f7f4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.775213 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "4c13d244-5d68-4fdc-834e-90409425f7f4" (UID: "4c13d244-5d68-4fdc-834e-90409425f7f4"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.805721 4693 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.805762 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kv7qs\" (UniqueName: \"kubernetes.io/projected/4c13d244-5d68-4fdc-834e-90409425f7f4-kube-api-access-kv7qs\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.805777 4693 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.805788 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c13d244-5d68-4fdc-834e-90409425f7f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.805800 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c13d244-5d68-4fdc-834e-90409425f7f4-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.805829 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c13d244-5d68-4fdc-834e-90409425f7f4-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.805840 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c13d244-5d68-4fdc-834e-90409425f7f4-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.879709 4693 scope.go:117] "RemoveContainer" containerID="fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.897533 4693 scope.go:117] "RemoveContainer" containerID="1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101" Oct 08 07:34:56 crc kubenswrapper[4693]: E1008 07:34:56.898059 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101\": container with ID starting with 1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101 not found: ID does not exist" 
containerID="1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.898105 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101"} err="failed to get container status \"1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101\": rpc error: code = NotFound desc = could not find container \"1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101\": container with ID starting with 1e19d1e84dee0ab2547ac52659a0dbc638b95ad225686a77cb411f0a920dc101 not found: ID does not exist" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.898130 4693 scope.go:117] "RemoveContainer" containerID="fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837" Oct 08 07:34:56 crc kubenswrapper[4693]: E1008 07:34:56.898506 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837\": container with ID starting with fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837 not found: ID does not exist" containerID="fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837" Oct 08 07:34:56 crc kubenswrapper[4693]: I1008 07:34:56.898530 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837"} err="failed to get container status \"fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837\": rpc error: code = NotFound desc = could not find container \"fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837\": container with ID starting with fc92eb0f369f2698c35e8a4718fd26c5bba374f74c755d0186e9ffc29c1c2837 not found: ID does not exist" Oct 08 07:34:57 crc kubenswrapper[4693]: I1008 07:34:57.025295 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-647ccf6b96-zrz9s"] Oct 08 07:34:57 crc kubenswrapper[4693]: I1008 07:34:57.033312 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-647ccf6b96-zrz9s"] Oct 08 07:34:57 crc kubenswrapper[4693]: I1008 07:34:57.375099 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" path="/var/lib/kubelet/pods/4c13d244-5d68-4fdc-834e-90409425f7f4/volumes" Oct 08 07:34:57 crc kubenswrapper[4693]: I1008 07:34:57.999473 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 08 07:34:57 crc kubenswrapper[4693]: I1008 07:34:57.999565 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 08 07:34:58 crc kubenswrapper[4693]: I1008 07:34:58.046199 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 08 07:34:58 crc kubenswrapper[4693]: I1008 07:34:58.085577 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 08 07:34:58 crc kubenswrapper[4693]: I1008 07:34:58.706793 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 08 07:34:58 crc kubenswrapper[4693]: I1008 07:34:58.707203 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/glance-default-external-api-0" Oct 08 07:35:00 crc kubenswrapper[4693]: I1008 07:35:00.478612 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 08 07:35:00 crc kubenswrapper[4693]: I1008 07:35:00.589449 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.008576 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-9fa1-account-create-f6sgs"] Oct 08 07:35:01 crc kubenswrapper[4693]: E1008 07:35:01.008987 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerName="horizon-log" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.009004 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerName="horizon-log" Oct 08 07:35:01 crc kubenswrapper[4693]: E1008 07:35:01.009026 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerName="horizon" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.009033 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerName="horizon" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.009194 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerName="horizon" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.009214 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c13d244-5d68-4fdc-834e-90409425f7f4" containerName="horizon-log" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.009775 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9fa1-account-create-f6sgs" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.011973 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.024848 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-9fa1-account-create-f6sgs"] Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.093716 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxq7z\" (UniqueName: \"kubernetes.io/projected/5f2182c2-a3ca-408b-9a70-876c78d382e4-kube-api-access-rxq7z\") pod \"nova-api-9fa1-account-create-f6sgs\" (UID: \"5f2182c2-a3ca-408b-9a70-876c78d382e4\") " pod="openstack/nova-api-9fa1-account-create-f6sgs" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.194927 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxq7z\" (UniqueName: \"kubernetes.io/projected/5f2182c2-a3ca-408b-9a70-876c78d382e4-kube-api-access-rxq7z\") pod \"nova-api-9fa1-account-create-f6sgs\" (UID: \"5f2182c2-a3ca-408b-9a70-876c78d382e4\") " pod="openstack/nova-api-9fa1-account-create-f6sgs" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.217725 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-bd05-account-create-vtqfj"] Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.218862 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-bd05-account-create-vtqfj" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.232774 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.234500 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxq7z\" (UniqueName: \"kubernetes.io/projected/5f2182c2-a3ca-408b-9a70-876c78d382e4-kube-api-access-rxq7z\") pod \"nova-api-9fa1-account-create-f6sgs\" (UID: \"5f2182c2-a3ca-408b-9a70-876c78d382e4\") " pod="openstack/nova-api-9fa1-account-create-f6sgs" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.241972 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-bd05-account-create-vtqfj"] Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.333260 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9fa1-account-create-f6sgs" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.398157 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4jlt\" (UniqueName: \"kubernetes.io/projected/6477ef9b-7553-43d9-ae63-d705ffad2213-kube-api-access-c4jlt\") pod \"nova-cell0-bd05-account-create-vtqfj\" (UID: \"6477ef9b-7553-43d9-ae63-d705ffad2213\") " pod="openstack/nova-cell0-bd05-account-create-vtqfj" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.414575 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-8a18-account-create-5qb8q"] Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.415588 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8a18-account-create-5qb8q" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.418777 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.437766 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8a18-account-create-5qb8q"] Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.499730 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4jlt\" (UniqueName: \"kubernetes.io/projected/6477ef9b-7553-43d9-ae63-d705ffad2213-kube-api-access-c4jlt\") pod \"nova-cell0-bd05-account-create-vtqfj\" (UID: \"6477ef9b-7553-43d9-ae63-d705ffad2213\") " pod="openstack/nova-cell0-bd05-account-create-vtqfj" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.499860 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44jfw\" (UniqueName: \"kubernetes.io/projected/e5e77527-c5f6-4ec3-8a18-b2ec14900f8f-kube-api-access-44jfw\") pod \"nova-cell1-8a18-account-create-5qb8q\" (UID: \"e5e77527-c5f6-4ec3-8a18-b2ec14900f8f\") " pod="openstack/nova-cell1-8a18-account-create-5qb8q" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.537503 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4jlt\" (UniqueName: \"kubernetes.io/projected/6477ef9b-7553-43d9-ae63-d705ffad2213-kube-api-access-c4jlt\") pod \"nova-cell0-bd05-account-create-vtqfj\" (UID: \"6477ef9b-7553-43d9-ae63-d705ffad2213\") " pod="openstack/nova-cell0-bd05-account-create-vtqfj" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.601882 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-44jfw\" (UniqueName: \"kubernetes.io/projected/e5e77527-c5f6-4ec3-8a18-b2ec14900f8f-kube-api-access-44jfw\") pod \"nova-cell1-8a18-account-create-5qb8q\" (UID: \"e5e77527-c5f6-4ec3-8a18-b2ec14900f8f\") " pod="openstack/nova-cell1-8a18-account-create-5qb8q" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.616530 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44jfw\" (UniqueName: \"kubernetes.io/projected/e5e77527-c5f6-4ec3-8a18-b2ec14900f8f-kube-api-access-44jfw\") pod \"nova-cell1-8a18-account-create-5qb8q\" (UID: \"e5e77527-c5f6-4ec3-8a18-b2ec14900f8f\") " pod="openstack/nova-cell1-8a18-account-create-5qb8q" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.620251 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-9fa1-account-create-f6sgs"] Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.735357 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9fa1-account-create-f6sgs" event={"ID":"5f2182c2-a3ca-408b-9a70-876c78d382e4","Type":"ContainerStarted","Data":"c85825381deb1c450cb63a77ac5a0263bc4a7d93b67641b26859d45e984fb239"} Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.766655 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8a18-account-create-5qb8q" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.834011 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-bd05-account-create-vtqfj" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.979265 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 08 07:35:01 crc kubenswrapper[4693]: I1008 07:35:01.979308 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.016501 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.020967 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.279717 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8a18-account-create-5qb8q"] Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.364134 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-bd05-account-create-vtqfj"] Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.745529 4693 generic.go:334] "Generic (PLEG): container finished" podID="e5e77527-c5f6-4ec3-8a18-b2ec14900f8f" containerID="01c447124f95edb09201e26ba5e748f3e1601b1d4adc8c6a08bfa90aa19f0b36" exitCode=0 Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.745597 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8a18-account-create-5qb8q" event={"ID":"e5e77527-c5f6-4ec3-8a18-b2ec14900f8f","Type":"ContainerDied","Data":"01c447124f95edb09201e26ba5e748f3e1601b1d4adc8c6a08bfa90aa19f0b36"} Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.745624 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8a18-account-create-5qb8q" event={"ID":"e5e77527-c5f6-4ec3-8a18-b2ec14900f8f","Type":"ContainerStarted","Data":"c116a77b3c5365d15dacd4ffcafefea453091d118ff43746924904ca3708a599"} Oct 08 
07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.747259 4693 generic.go:334] "Generic (PLEG): container finished" podID="5f2182c2-a3ca-408b-9a70-876c78d382e4" containerID="9441edb5fda88749d2822abe6a39380dd6c6b29bb7821f84e912151952e6e632" exitCode=0 Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.747322 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9fa1-account-create-f6sgs" event={"ID":"5f2182c2-a3ca-408b-9a70-876c78d382e4","Type":"ContainerDied","Data":"9441edb5fda88749d2822abe6a39380dd6c6b29bb7821f84e912151952e6e632"} Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.748882 4693 generic.go:334] "Generic (PLEG): container finished" podID="6477ef9b-7553-43d9-ae63-d705ffad2213" containerID="3045375e561941cbe2f47c2d4cd049225f160b6869463480e2d0725249d1ebdb" exitCode=0 Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.748922 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-bd05-account-create-vtqfj" event={"ID":"6477ef9b-7553-43d9-ae63-d705ffad2213","Type":"ContainerDied","Data":"3045375e561941cbe2f47c2d4cd049225f160b6869463480e2d0725249d1ebdb"} Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.748954 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-bd05-account-create-vtqfj" event={"ID":"6477ef9b-7553-43d9-ae63-d705ffad2213","Type":"ContainerStarted","Data":"d4558717a91f6b160369f5a63f20628cc0200dff10699f5c9d905aeef5f240c6"} Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.749172 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 08 07:35:02 crc kubenswrapper[4693]: I1008 07:35:02.749200 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.151883 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-bd05-account-create-vtqfj" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.247004 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4jlt\" (UniqueName: \"kubernetes.io/projected/6477ef9b-7553-43d9-ae63-d705ffad2213-kube-api-access-c4jlt\") pod \"6477ef9b-7553-43d9-ae63-d705ffad2213\" (UID: \"6477ef9b-7553-43d9-ae63-d705ffad2213\") " Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.255988 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6477ef9b-7553-43d9-ae63-d705ffad2213-kube-api-access-c4jlt" (OuterVolumeSpecName: "kube-api-access-c4jlt") pod "6477ef9b-7553-43d9-ae63-d705ffad2213" (UID: "6477ef9b-7553-43d9-ae63-d705ffad2213"). InnerVolumeSpecName "kube-api-access-c4jlt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.295654 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8a18-account-create-5qb8q" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.302760 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-9fa1-account-create-f6sgs" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.349060 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44jfw\" (UniqueName: \"kubernetes.io/projected/e5e77527-c5f6-4ec3-8a18-b2ec14900f8f-kube-api-access-44jfw\") pod \"e5e77527-c5f6-4ec3-8a18-b2ec14900f8f\" (UID: \"e5e77527-c5f6-4ec3-8a18-b2ec14900f8f\") " Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.349127 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxq7z\" (UniqueName: \"kubernetes.io/projected/5f2182c2-a3ca-408b-9a70-876c78d382e4-kube-api-access-rxq7z\") pod \"5f2182c2-a3ca-408b-9a70-876c78d382e4\" (UID: \"5f2182c2-a3ca-408b-9a70-876c78d382e4\") " Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.349565 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4jlt\" (UniqueName: \"kubernetes.io/projected/6477ef9b-7553-43d9-ae63-d705ffad2213-kube-api-access-c4jlt\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.351886 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5e77527-c5f6-4ec3-8a18-b2ec14900f8f-kube-api-access-44jfw" (OuterVolumeSpecName: "kube-api-access-44jfw") pod "e5e77527-c5f6-4ec3-8a18-b2ec14900f8f" (UID: "e5e77527-c5f6-4ec3-8a18-b2ec14900f8f"). InnerVolumeSpecName "kube-api-access-44jfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.353250 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f2182c2-a3ca-408b-9a70-876c78d382e4-kube-api-access-rxq7z" (OuterVolumeSpecName: "kube-api-access-rxq7z") pod "5f2182c2-a3ca-408b-9a70-876c78d382e4" (UID: "5f2182c2-a3ca-408b-9a70-876c78d382e4"). InnerVolumeSpecName "kube-api-access-rxq7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.451544 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44jfw\" (UniqueName: \"kubernetes.io/projected/e5e77527-c5f6-4ec3-8a18-b2ec14900f8f-kube-api-access-44jfw\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.451581 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxq7z\" (UniqueName: \"kubernetes.io/projected/5f2182c2-a3ca-408b-9a70-876c78d382e4-kube-api-access-rxq7z\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.764824 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8a18-account-create-5qb8q" event={"ID":"e5e77527-c5f6-4ec3-8a18-b2ec14900f8f","Type":"ContainerDied","Data":"c116a77b3c5365d15dacd4ffcafefea453091d118ff43746924904ca3708a599"} Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.764864 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c116a77b3c5365d15dacd4ffcafefea453091d118ff43746924904ca3708a599" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.764879 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-8a18-account-create-5qb8q" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.766265 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9fa1-account-create-f6sgs" event={"ID":"5f2182c2-a3ca-408b-9a70-876c78d382e4","Type":"ContainerDied","Data":"c85825381deb1c450cb63a77ac5a0263bc4a7d93b67641b26859d45e984fb239"} Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.766309 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c85825381deb1c450cb63a77ac5a0263bc4a7d93b67641b26859d45e984fb239" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.766287 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9fa1-account-create-f6sgs" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.768045 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-bd05-account-create-vtqfj" event={"ID":"6477ef9b-7553-43d9-ae63-d705ffad2213","Type":"ContainerDied","Data":"d4558717a91f6b160369f5a63f20628cc0200dff10699f5c9d905aeef5f240c6"} Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.768068 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4558717a91f6b160369f5a63f20628cc0200dff10699f5c9d905aeef5f240c6" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.768122 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-bd05-account-create-vtqfj" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.901918 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.902288 4693 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 08 07:35:04 crc kubenswrapper[4693]: I1008 07:35:04.906672 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.374488 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n9c4r"] Oct 08 07:35:06 crc kubenswrapper[4693]: E1008 07:35:06.375046 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f2182c2-a3ca-408b-9a70-876c78d382e4" containerName="mariadb-account-create" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.375058 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f2182c2-a3ca-408b-9a70-876c78d382e4" containerName="mariadb-account-create" Oct 08 07:35:06 crc kubenswrapper[4693]: E1008 07:35:06.375084 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5e77527-c5f6-4ec3-8a18-b2ec14900f8f" containerName="mariadb-account-create" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.375091 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5e77527-c5f6-4ec3-8a18-b2ec14900f8f" containerName="mariadb-account-create" Oct 08 07:35:06 crc kubenswrapper[4693]: E1008 07:35:06.375119 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6477ef9b-7553-43d9-ae63-d705ffad2213" containerName="mariadb-account-create" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.375125 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="6477ef9b-7553-43d9-ae63-d705ffad2213" containerName="mariadb-account-create" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.375275 4693 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="e5e77527-c5f6-4ec3-8a18-b2ec14900f8f" containerName="mariadb-account-create" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.375289 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f2182c2-a3ca-408b-9a70-876c78d382e4" containerName="mariadb-account-create" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.375297 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="6477ef9b-7553-43d9-ae63-d705ffad2213" containerName="mariadb-account-create" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.375856 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.379624 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-5b5zg" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.379828 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.380732 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.387983 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n9c4r"] Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.486724 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-config-data\") pod \"nova-cell0-conductor-db-sync-n9c4r\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.486874 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-n9c4r\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.486935 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-scripts\") pod \"nova-cell0-conductor-db-sync-n9c4r\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.487092 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfdmb\" (UniqueName: \"kubernetes.io/projected/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-kube-api-access-zfdmb\") pod \"nova-cell0-conductor-db-sync-n9c4r\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.588180 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfdmb\" (UniqueName: \"kubernetes.io/projected/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-kube-api-access-zfdmb\") pod \"nova-cell0-conductor-db-sync-n9c4r\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 
07:35:06.588246 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-config-data\") pod \"nova-cell0-conductor-db-sync-n9c4r\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.588308 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-n9c4r\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.589140 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-scripts\") pod \"nova-cell0-conductor-db-sync-n9c4r\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.595632 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-n9c4r\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.596318 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-scripts\") pod \"nova-cell0-conductor-db-sync-n9c4r\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.596954 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-config-data\") pod \"nova-cell0-conductor-db-sync-n9c4r\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.607132 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfdmb\" (UniqueName: \"kubernetes.io/projected/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-kube-api-access-zfdmb\") pod \"nova-cell0-conductor-db-sync-n9c4r\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:06 crc kubenswrapper[4693]: I1008 07:35:06.695115 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:07 crc kubenswrapper[4693]: I1008 07:35:07.251624 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n9c4r"] Oct 08 07:35:07 crc kubenswrapper[4693]: I1008 07:35:07.830644 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-n9c4r" event={"ID":"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3","Type":"ContainerStarted","Data":"ec2bcbb61c0ebe6b13e126ed4b05346a8f471c6269e13205bcedeee7bf414952"} Oct 08 07:35:14 crc kubenswrapper[4693]: I1008 07:35:14.916986 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-n9c4r" event={"ID":"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3","Type":"ContainerStarted","Data":"e7f620b66bdb8ff42d4a7936a5ea2870d6de64371eafab136fa9455050004390"} Oct 08 07:35:14 crc kubenswrapper[4693]: I1008 07:35:14.948280 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-n9c4r" podStartSLOduration=1.759650349 podStartE2EDuration="8.948250871s" podCreationTimestamp="2025-10-08 07:35:06 +0000 UTC" firstStartedPulling="2025-10-08 07:35:07.260924763 +0000 UTC m=+1092.631889698" lastFinishedPulling="2025-10-08 07:35:14.449525275 +0000 UTC m=+1099.820490220" observedRunningTime="2025-10-08 07:35:14.944489104 +0000 UTC m=+1100.315454079" watchObservedRunningTime="2025-10-08 07:35:14.948250871 +0000 UTC m=+1100.319215836" Oct 08 07:35:18 crc kubenswrapper[4693]: I1008 07:35:18.047263 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.038387 4693 generic.go:334] "Generic (PLEG): container finished" podID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerID="f63b5dbaf1ad3054df0f25142d88f63f061899da4cb2f4d959c4797c62b97f9d" exitCode=137 Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.039155 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f58544a-80c3-438f-ba73-9b83d095b3b3","Type":"ContainerDied","Data":"f63b5dbaf1ad3054df0f25142d88f63f061899da4cb2f4d959c4797c62b97f9d"} Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.039189 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f58544a-80c3-438f-ba73-9b83d095b3b3","Type":"ContainerDied","Data":"76c588659c94004b0346e9538401ccfb680799f61a4733ada86848e92ce42f4b"} Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.039202 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76c588659c94004b0346e9538401ccfb680799f61a4733ada86848e92ce42f4b" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.114881 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.142833 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f58544a-80c3-438f-ba73-9b83d095b3b3-run-httpd\") pod \"5f58544a-80c3-438f-ba73-9b83d095b3b3\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.142924 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-scripts\") pod \"5f58544a-80c3-438f-ba73-9b83d095b3b3\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.142986 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nk62\" (UniqueName: \"kubernetes.io/projected/5f58544a-80c3-438f-ba73-9b83d095b3b3-kube-api-access-4nk62\") pod \"5f58544a-80c3-438f-ba73-9b83d095b3b3\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.143065 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f58544a-80c3-438f-ba73-9b83d095b3b3-log-httpd\") pod \"5f58544a-80c3-438f-ba73-9b83d095b3b3\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.143236 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-sg-core-conf-yaml\") pod \"5f58544a-80c3-438f-ba73-9b83d095b3b3\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.143260 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-config-data\") pod \"5f58544a-80c3-438f-ba73-9b83d095b3b3\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.143316 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-combined-ca-bundle\") pod \"5f58544a-80c3-438f-ba73-9b83d095b3b3\" (UID: \"5f58544a-80c3-438f-ba73-9b83d095b3b3\") " Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.144182 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f58544a-80c3-438f-ba73-9b83d095b3b3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5f58544a-80c3-438f-ba73-9b83d095b3b3" (UID: "5f58544a-80c3-438f-ba73-9b83d095b3b3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.145282 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f58544a-80c3-438f-ba73-9b83d095b3b3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5f58544a-80c3-438f-ba73-9b83d095b3b3" (UID: "5f58544a-80c3-438f-ba73-9b83d095b3b3"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.145737 4693 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f58544a-80c3-438f-ba73-9b83d095b3b3-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.145753 4693 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f58544a-80c3-438f-ba73-9b83d095b3b3-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.177628 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-scripts" (OuterVolumeSpecName: "scripts") pod "5f58544a-80c3-438f-ba73-9b83d095b3b3" (UID: "5f58544a-80c3-438f-ba73-9b83d095b3b3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.192071 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f58544a-80c3-438f-ba73-9b83d095b3b3-kube-api-access-4nk62" (OuterVolumeSpecName: "kube-api-access-4nk62") pod "5f58544a-80c3-438f-ba73-9b83d095b3b3" (UID: "5f58544a-80c3-438f-ba73-9b83d095b3b3"). InnerVolumeSpecName "kube-api-access-4nk62". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.230042 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5f58544a-80c3-438f-ba73-9b83d095b3b3" (UID: "5f58544a-80c3-438f-ba73-9b83d095b3b3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.247747 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.247778 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nk62\" (UniqueName: \"kubernetes.io/projected/5f58544a-80c3-438f-ba73-9b83d095b3b3-kube-api-access-4nk62\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.247790 4693 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.295197 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f58544a-80c3-438f-ba73-9b83d095b3b3" (UID: "5f58544a-80c3-438f-ba73-9b83d095b3b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.303786 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-config-data" (OuterVolumeSpecName: "config-data") pod "5f58544a-80c3-438f-ba73-9b83d095b3b3" (UID: "5f58544a-80c3-438f-ba73-9b83d095b3b3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.349483 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.349511 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f58544a-80c3-438f-ba73-9b83d095b3b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.489813 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.489937 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.490018 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.491307 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"622f06c0bb6cdaa5465830b91799216c70f2eacc877b5e7e53cedc7bd9a96277"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 08 07:35:23 crc kubenswrapper[4693]: I1008 07:35:23.491454 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://622f06c0bb6cdaa5465830b91799216c70f2eacc877b5e7e53cedc7bd9a96277" gracePeriod=600 Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.059062 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="622f06c0bb6cdaa5465830b91799216c70f2eacc877b5e7e53cedc7bd9a96277" exitCode=0 Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.059151 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"622f06c0bb6cdaa5465830b91799216c70f2eacc877b5e7e53cedc7bd9a96277"} Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.059515 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"007470b3bab1092250300403efa58dc0217e53cad25ad454a5438806005d0400"} Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.059525 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.059545 4693 scope.go:117] "RemoveContainer" containerID="2d5b335782865c10d71fa446e1c1690f0fdc6f76d8d4163cb446a08bd0b03853" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.093497 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.141920 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.169651 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:35:24 crc kubenswrapper[4693]: E1008 07:35:24.170296 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="ceilometer-central-agent" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.170336 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="ceilometer-central-agent" Oct 08 07:35:24 crc kubenswrapper[4693]: E1008 07:35:24.170376 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="proxy-httpd" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.170383 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="proxy-httpd" Oct 08 07:35:24 crc kubenswrapper[4693]: E1008 07:35:24.170417 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="ceilometer-notification-agent" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.170427 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="ceilometer-notification-agent" Oct 08 07:35:24 crc kubenswrapper[4693]: E1008 07:35:24.170440 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="sg-core" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.170446 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="sg-core" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.170665 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="sg-core" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.170677 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="ceilometer-notification-agent" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.170688 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="proxy-httpd" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.170710 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" containerName="ceilometer-central-agent" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.172662 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.178369 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.178745 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.178919 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.267113 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wfdg\" (UniqueName: \"kubernetes.io/projected/71ae5ffc-8a6c-496c-83b3-3a394b90f549-kube-api-access-8wfdg\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.267170 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.267192 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71ae5ffc-8a6c-496c-83b3-3a394b90f549-run-httpd\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.267215 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-scripts\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.267280 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-config-data\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.267311 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71ae5ffc-8a6c-496c-83b3-3a394b90f549-log-httpd\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.267340 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.369733 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wfdg\" (UniqueName: \"kubernetes.io/projected/71ae5ffc-8a6c-496c-83b3-3a394b90f549-kube-api-access-8wfdg\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: 
I1008 07:35:24.369793 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.369815 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71ae5ffc-8a6c-496c-83b3-3a394b90f549-run-httpd\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.369856 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-scripts\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.369919 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-config-data\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.369951 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71ae5ffc-8a6c-496c-83b3-3a394b90f549-log-httpd\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.369985 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.371156 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71ae5ffc-8a6c-496c-83b3-3a394b90f549-run-httpd\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.371713 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71ae5ffc-8a6c-496c-83b3-3a394b90f549-log-httpd\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.377331 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.379305 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-scripts\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.383898 4693 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-config-data\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.390141 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.398300 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wfdg\" (UniqueName: \"kubernetes.io/projected/71ae5ffc-8a6c-496c-83b3-3a394b90f549-kube-api-access-8wfdg\") pod \"ceilometer-0\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.492357 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:35:24 crc kubenswrapper[4693]: W1008 07:35:24.968039 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71ae5ffc_8a6c_496c_83b3_3a394b90f549.slice/crio-d2b744f6a3941b8d78f38c58e786099c0740f85ccbc7e34e7f3657000321efd9 WatchSource:0}: Error finding container d2b744f6a3941b8d78f38c58e786099c0740f85ccbc7e34e7f3657000321efd9: Status 404 returned error can't find the container with id d2b744f6a3941b8d78f38c58e786099c0740f85ccbc7e34e7f3657000321efd9 Oct 08 07:35:24 crc kubenswrapper[4693]: I1008 07:35:24.970812 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:35:25 crc kubenswrapper[4693]: I1008 07:35:25.083636 4693 generic.go:334] "Generic (PLEG): container finished" podID="6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3" containerID="e7f620b66bdb8ff42d4a7936a5ea2870d6de64371eafab136fa9455050004390" exitCode=0 Oct 08 07:35:25 crc kubenswrapper[4693]: I1008 07:35:25.083938 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-n9c4r" event={"ID":"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3","Type":"ContainerDied","Data":"e7f620b66bdb8ff42d4a7936a5ea2870d6de64371eafab136fa9455050004390"} Oct 08 07:35:25 crc kubenswrapper[4693]: I1008 07:35:25.089527 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71ae5ffc-8a6c-496c-83b3-3a394b90f549","Type":"ContainerStarted","Data":"d2b744f6a3941b8d78f38c58e786099c0740f85ccbc7e34e7f3657000321efd9"} Oct 08 07:35:25 crc kubenswrapper[4693]: I1008 07:35:25.381992 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f58544a-80c3-438f-ba73-9b83d095b3b3" path="/var/lib/kubelet/pods/5f58544a-80c3-438f-ba73-9b83d095b3b3/volumes" Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.106321 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71ae5ffc-8a6c-496c-83b3-3a394b90f549","Type":"ContainerStarted","Data":"d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba"} Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.528388 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.618574 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-combined-ca-bundle\") pod \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.618880 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-scripts\") pod \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.618966 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-config-data\") pod \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.619089 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfdmb\" (UniqueName: \"kubernetes.io/projected/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-kube-api-access-zfdmb\") pod \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\" (UID: \"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3\") " Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.623963 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-kube-api-access-zfdmb" (OuterVolumeSpecName: "kube-api-access-zfdmb") pod "6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3" (UID: "6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3"). InnerVolumeSpecName "kube-api-access-zfdmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.624056 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-scripts" (OuterVolumeSpecName: "scripts") pod "6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3" (UID: "6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.647466 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-config-data" (OuterVolumeSpecName: "config-data") pod "6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3" (UID: "6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.650340 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3" (UID: "6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.721890 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.722113 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.722178 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:26 crc kubenswrapper[4693]: I1008 07:35:26.722231 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfdmb\" (UniqueName: \"kubernetes.io/projected/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3-kube-api-access-zfdmb\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.122711 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-n9c4r" event={"ID":"6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3","Type":"ContainerDied","Data":"ec2bcbb61c0ebe6b13e126ed4b05346a8f471c6269e13205bcedeee7bf414952"} Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.122775 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec2bcbb61c0ebe6b13e126ed4b05346a8f471c6269e13205bcedeee7bf414952" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.123859 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-n9c4r" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.127263 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71ae5ffc-8a6c-496c-83b3-3a394b90f549","Type":"ContainerStarted","Data":"c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba"} Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.221688 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 08 07:35:27 crc kubenswrapper[4693]: E1008 07:35:27.222140 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3" containerName="nova-cell0-conductor-db-sync" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.222164 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3" containerName="nova-cell0-conductor-db-sync" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.222374 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3" containerName="nova-cell0-conductor-db-sync" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.223108 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.230934 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-5b5zg" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.232368 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.260244 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.334977 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znhjg\" (UniqueName: \"kubernetes.io/projected/bf23439a-4d07-4711-9190-3fce06bdf2e4-kube-api-access-znhjg\") pod \"nova-cell0-conductor-0\" (UID: \"bf23439a-4d07-4711-9190-3fce06bdf2e4\") " pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.335059 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf23439a-4d07-4711-9190-3fce06bdf2e4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"bf23439a-4d07-4711-9190-3fce06bdf2e4\") " pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.335175 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf23439a-4d07-4711-9190-3fce06bdf2e4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"bf23439a-4d07-4711-9190-3fce06bdf2e4\") " pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.436622 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znhjg\" (UniqueName: \"kubernetes.io/projected/bf23439a-4d07-4711-9190-3fce06bdf2e4-kube-api-access-znhjg\") pod \"nova-cell0-conductor-0\" (UID: \"bf23439a-4d07-4711-9190-3fce06bdf2e4\") " pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.436688 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf23439a-4d07-4711-9190-3fce06bdf2e4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"bf23439a-4d07-4711-9190-3fce06bdf2e4\") " pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.436772 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf23439a-4d07-4711-9190-3fce06bdf2e4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"bf23439a-4d07-4711-9190-3fce06bdf2e4\") " pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.441072 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf23439a-4d07-4711-9190-3fce06bdf2e4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"bf23439a-4d07-4711-9190-3fce06bdf2e4\") " pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.441459 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf23439a-4d07-4711-9190-3fce06bdf2e4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"bf23439a-4d07-4711-9190-3fce06bdf2e4\") " pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.452187 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znhjg\" (UniqueName: \"kubernetes.io/projected/bf23439a-4d07-4711-9190-3fce06bdf2e4-kube-api-access-znhjg\") pod \"nova-cell0-conductor-0\" (UID: \"bf23439a-4d07-4711-9190-3fce06bdf2e4\") " pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:27 crc kubenswrapper[4693]: I1008 07:35:27.538608 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:28 crc kubenswrapper[4693]: I1008 07:35:28.012152 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 08 07:35:28 crc kubenswrapper[4693]: W1008 07:35:28.025742 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf23439a_4d07_4711_9190_3fce06bdf2e4.slice/crio-71ae7f42e96205b8f61c6b6f165b5632eae9c1c753f680b5765873c3fd1b7047 WatchSource:0}: Error finding container 71ae7f42e96205b8f61c6b6f165b5632eae9c1c753f680b5765873c3fd1b7047: Status 404 returned error can't find the container with id 71ae7f42e96205b8f61c6b6f165b5632eae9c1c753f680b5765873c3fd1b7047 Oct 08 07:35:28 crc kubenswrapper[4693]: I1008 07:35:28.147615 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"bf23439a-4d07-4711-9190-3fce06bdf2e4","Type":"ContainerStarted","Data":"71ae7f42e96205b8f61c6b6f165b5632eae9c1c753f680b5765873c3fd1b7047"} Oct 08 07:35:28 crc kubenswrapper[4693]: I1008 07:35:28.152629 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71ae5ffc-8a6c-496c-83b3-3a394b90f549","Type":"ContainerStarted","Data":"fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95"} Oct 08 07:35:29 crc kubenswrapper[4693]: I1008 07:35:29.166330 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"bf23439a-4d07-4711-9190-3fce06bdf2e4","Type":"ContainerStarted","Data":"54fd23cc8e5bc6bc9208366e5ea5bf55addccf30ecc5c7e7aa58cec22a4ad711"} Oct 08 07:35:29 crc kubenswrapper[4693]: I1008 07:35:29.166498 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:29 crc kubenswrapper[4693]: I1008 07:35:29.187802 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.187779718 podStartE2EDuration="2.187779718s" podCreationTimestamp="2025-10-08 07:35:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:35:29.184275297 +0000 UTC m=+1114.555240232" watchObservedRunningTime="2025-10-08 07:35:29.187779718 +0000 UTC m=+1114.558744673" Oct 08 07:35:30 crc kubenswrapper[4693]: I1008 07:35:30.180937 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71ae5ffc-8a6c-496c-83b3-3a394b90f549","Type":"ContainerStarted","Data":"609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4"} Oct 08 07:35:30 crc kubenswrapper[4693]: I1008 07:35:30.181350 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 08 07:35:30 crc kubenswrapper[4693]: I1008 07:35:30.217124 4693 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.797453441 podStartE2EDuration="6.217102928s" podCreationTimestamp="2025-10-08 07:35:24 +0000 UTC" firstStartedPulling="2025-10-08 07:35:24.970549807 +0000 UTC m=+1110.341514752" lastFinishedPulling="2025-10-08 07:35:29.390199264 +0000 UTC m=+1114.761164239" observedRunningTime="2025-10-08 07:35:30.209211515 +0000 UTC m=+1115.580176450" watchObservedRunningTime="2025-10-08 07:35:30.217102928 +0000 UTC m=+1115.588067863" Oct 08 07:35:37 crc kubenswrapper[4693]: I1008 07:35:37.581293 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.204305 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-pp99v"] Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.206233 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.209266 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.209410 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.222372 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-pp99v"] Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.244875 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-pp99v\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.245106 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-scripts\") pod \"nova-cell0-cell-mapping-pp99v\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.245305 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-config-data\") pod \"nova-cell0-cell-mapping-pp99v\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.245417 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64l87\" (UniqueName: \"kubernetes.io/projected/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-kube-api-access-64l87\") pod \"nova-cell0-cell-mapping-pp99v\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.347408 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-scripts\") pod \"nova-cell0-cell-mapping-pp99v\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " 
pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.347791 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-config-data\") pod \"nova-cell0-cell-mapping-pp99v\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.348780 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64l87\" (UniqueName: \"kubernetes.io/projected/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-kube-api-access-64l87\") pod \"nova-cell0-cell-mapping-pp99v\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.349304 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-pp99v\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.356791 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-scripts\") pod \"nova-cell0-cell-mapping-pp99v\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.357278 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-pp99v\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.385450 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-config-data\") pod \"nova-cell0-cell-mapping-pp99v\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.398540 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64l87\" (UniqueName: \"kubernetes.io/projected/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-kube-api-access-64l87\") pod \"nova-cell0-cell-mapping-pp99v\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.429903 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.431535 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.436334 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.460070 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-config-data\") pod \"nova-api-0\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.460157 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qt7h\" (UniqueName: \"kubernetes.io/projected/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-kube-api-access-6qt7h\") pod \"nova-api-0\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.460319 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-logs\") pod \"nova-api-0\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.461087 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.461215 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.500593 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.502075 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.507920 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.535265 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.562587 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-config-data\") pod \"nova-api-0\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.562639 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qt7h\" (UniqueName: \"kubernetes.io/projected/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-kube-api-access-6qt7h\") pod \"nova-api-0\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.562673 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae77465-4a61-4432-88fc-e0ac674efbf7-config-data\") pod \"nova-metadata-0\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.562705 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-logs\") pod \"nova-api-0\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.562729 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ae77465-4a61-4432-88fc-e0ac674efbf7-logs\") pod \"nova-metadata-0\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.563134 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae77465-4a61-4432-88fc-e0ac674efbf7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.563179 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn95v\" (UniqueName: \"kubernetes.io/projected/7ae77465-4a61-4432-88fc-e0ac674efbf7-kube-api-access-rn95v\") pod \"nova-metadata-0\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.563208 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.566922 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-logs\") pod \"nova-api-0\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.567008 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.568436 4693 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.580361 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.588841 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-config-data\") pod \"nova-api-0\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.589491 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.592308 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qt7h\" (UniqueName: \"kubernetes.io/projected/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-kube-api-access-6qt7h\") pod \"nova-api-0\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.624567 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.658716 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.666766 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae77465-4a61-4432-88fc-e0ac674efbf7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.666835 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn95v\" (UniqueName: \"kubernetes.io/projected/7ae77465-4a61-4432-88fc-e0ac674efbf7-kube-api-access-rn95v\") pod \"nova-metadata-0\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.666867 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2133c260-e214-428f-8f00-92d29d84594e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2133c260-e214-428f-8f00-92d29d84594e\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.666936 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2133c260-e214-428f-8f00-92d29d84594e-config-data\") pod \"nova-scheduler-0\" (UID: \"2133c260-e214-428f-8f00-92d29d84594e\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.666973 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9rzs\" (UniqueName: \"kubernetes.io/projected/2133c260-e214-428f-8f00-92d29d84594e-kube-api-access-c9rzs\") pod \"nova-scheduler-0\" (UID: \"2133c260-e214-428f-8f00-92d29d84594e\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:38 crc kubenswrapper[4693]: 
I1008 07:35:38.667015 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae77465-4a61-4432-88fc-e0ac674efbf7-config-data\") pod \"nova-metadata-0\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.667057 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ae77465-4a61-4432-88fc-e0ac674efbf7-logs\") pod \"nova-metadata-0\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.667419 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ae77465-4a61-4432-88fc-e0ac674efbf7-logs\") pod \"nova-metadata-0\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.676083 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae77465-4a61-4432-88fc-e0ac674efbf7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.681507 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae77465-4a61-4432-88fc-e0ac674efbf7-config-data\") pod \"nova-metadata-0\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.684086 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.686511 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.687594 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn95v\" (UniqueName: \"kubernetes.io/projected/7ae77465-4a61-4432-88fc-e0ac674efbf7-kube-api-access-rn95v\") pod \"nova-metadata-0\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.690602 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.700515 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-s92d2"] Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.702654 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.704998 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.767325 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770173 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/936e36b3-6be5-4b41-9473-08474ea5443c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"936e36b3-6be5-4b41-9473-08474ea5443c\") " pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770230 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-config\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770299 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/936e36b3-6be5-4b41-9473-08474ea5443c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"936e36b3-6be5-4b41-9473-08474ea5443c\") " pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770324 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2133c260-e214-428f-8f00-92d29d84594e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2133c260-e214-428f-8f00-92d29d84594e\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770387 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfgk2\" (UniqueName: \"kubernetes.io/projected/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-kube-api-access-dfgk2\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770417 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc6lv\" (UniqueName: \"kubernetes.io/projected/936e36b3-6be5-4b41-9473-08474ea5443c-kube-api-access-vc6lv\") pod \"nova-cell1-novncproxy-0\" (UID: \"936e36b3-6be5-4b41-9473-08474ea5443c\") " pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770475 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770533 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2133c260-e214-428f-8f00-92d29d84594e-config-data\") pod \"nova-scheduler-0\" (UID: \"2133c260-e214-428f-8f00-92d29d84594e\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770554 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: 
\"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770583 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-dns-svc\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770800 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770968 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-s92d2"] Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.770849 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9rzs\" (UniqueName: \"kubernetes.io/projected/2133c260-e214-428f-8f00-92d29d84594e-kube-api-access-c9rzs\") pod \"nova-scheduler-0\" (UID: \"2133c260-e214-428f-8f00-92d29d84594e\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.777419 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2133c260-e214-428f-8f00-92d29d84594e-config-data\") pod \"nova-scheduler-0\" (UID: \"2133c260-e214-428f-8f00-92d29d84594e\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.777432 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2133c260-e214-428f-8f00-92d29d84594e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2133c260-e214-428f-8f00-92d29d84594e\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.786041 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9rzs\" (UniqueName: \"kubernetes.io/projected/2133c260-e214-428f-8f00-92d29d84594e-kube-api-access-c9rzs\") pod \"nova-scheduler-0\" (UID: \"2133c260-e214-428f-8f00-92d29d84594e\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.820916 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.877419 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/936e36b3-6be5-4b41-9473-08474ea5443c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"936e36b3-6be5-4b41-9473-08474ea5443c\") " pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.877694 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-config\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.877738 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/936e36b3-6be5-4b41-9473-08474ea5443c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"936e36b3-6be5-4b41-9473-08474ea5443c\") " pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.877775 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfgk2\" (UniqueName: \"kubernetes.io/projected/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-kube-api-access-dfgk2\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.877799 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc6lv\" (UniqueName: \"kubernetes.io/projected/936e36b3-6be5-4b41-9473-08474ea5443c-kube-api-access-vc6lv\") pod \"nova-cell1-novncproxy-0\" (UID: \"936e36b3-6be5-4b41-9473-08474ea5443c\") " pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.877849 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.877869 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.877892 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-dns-svc\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.877915 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc 
kubenswrapper[4693]: I1008 07:35:38.878654 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.879178 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-config\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.879189 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.879840 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.880210 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-dns-svc\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.885703 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/936e36b3-6be5-4b41-9473-08474ea5443c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"936e36b3-6be5-4b41-9473-08474ea5443c\") " pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.888527 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/936e36b3-6be5-4b41-9473-08474ea5443c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"936e36b3-6be5-4b41-9473-08474ea5443c\") " pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.895157 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc6lv\" (UniqueName: \"kubernetes.io/projected/936e36b3-6be5-4b41-9473-08474ea5443c-kube-api-access-vc6lv\") pod \"nova-cell1-novncproxy-0\" (UID: \"936e36b3-6be5-4b41-9473-08474ea5443c\") " pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:35:38 crc kubenswrapper[4693]: I1008 07:35:38.900285 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfgk2\" (UniqueName: \"kubernetes.io/projected/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-kube-api-access-dfgk2\") pod \"dnsmasq-dns-757b4f8459-s92d2\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.055348 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.065260 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.080245 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.184193 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-pp99v"] Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.289401 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-pp99v" event={"ID":"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d","Type":"ContainerStarted","Data":"9544d80c7822fe96f5867b5630022d97e290cadf32b0d65c3b068d9ab82a9575"} Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.312625 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:35:39 crc kubenswrapper[4693]: W1008 07:35:39.315521 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b3434ec_c2c8_47cf_a2e4_dc61725564d7.slice/crio-5308ffe4486b2969e831b82f5207a57ac28f4f64085b9ff008d8b95d3ebb2b0d WatchSource:0}: Error finding container 5308ffe4486b2969e831b82f5207a57ac28f4f64085b9ff008d8b95d3ebb2b0d: Status 404 returned error can't find the container with id 5308ffe4486b2969e831b82f5207a57ac28f4f64085b9ff008d8b95d3ebb2b0d Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.322684 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-69qw5"] Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.324128 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.330359 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.330535 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.384533 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-69qw5"] Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.393009 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-config-data\") pod \"nova-cell1-conductor-db-sync-69qw5\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.393129 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-69qw5\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.393204 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-scripts\") pod \"nova-cell1-conductor-db-sync-69qw5\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.393301 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psfns\" (UniqueName: \"kubernetes.io/projected/c7338453-7794-4bcd-8204-a189c93b7606-kube-api-access-psfns\") pod \"nova-cell1-conductor-db-sync-69qw5\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.407147 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.497745 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-config-data\") pod \"nova-cell1-conductor-db-sync-69qw5\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.498053 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-69qw5\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.498089 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-scripts\") pod \"nova-cell1-conductor-db-sync-69qw5\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " 
pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.498145 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psfns\" (UniqueName: \"kubernetes.io/projected/c7338453-7794-4bcd-8204-a189c93b7606-kube-api-access-psfns\") pod \"nova-cell1-conductor-db-sync-69qw5\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.502503 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-config-data\") pod \"nova-cell1-conductor-db-sync-69qw5\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.505010 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-scripts\") pod \"nova-cell1-conductor-db-sync-69qw5\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.505008 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-69qw5\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.514799 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psfns\" (UniqueName: \"kubernetes.io/projected/c7338453-7794-4bcd-8204-a189c93b7606-kube-api-access-psfns\") pod \"nova-cell1-conductor-db-sync-69qw5\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.624147 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-s92d2"] Oct 08 07:35:39 crc kubenswrapper[4693]: W1008 07:35:39.634973 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15bacf5c_3c15_48c3_9c6c_478bd9e5d599.slice/crio-44fd18740f24164332bed42b1f1072a4557bc3f054870d313d04650719534754 WatchSource:0}: Error finding container 44fd18740f24164332bed42b1f1072a4557bc3f054870d313d04650719534754: Status 404 returned error can't find the container with id 44fd18740f24164332bed42b1f1072a4557bc3f054870d313d04650719534754 Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.654729 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.759902 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 08 07:35:39 crc kubenswrapper[4693]: W1008 07:35:39.768577 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod936e36b3_6be5_4b41_9473_08474ea5443c.slice/crio-662a97075996d687c3f3e9cf57adbddbcbef4829514d99b2217715832b1537ce WatchSource:0}: Error finding container 662a97075996d687c3f3e9cf57adbddbcbef4829514d99b2217715832b1537ce: Status 404 returned error can't find the container with id 
662a97075996d687c3f3e9cf57adbddbcbef4829514d99b2217715832b1537ce Oct 08 07:35:39 crc kubenswrapper[4693]: I1008 07:35:39.785132 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:40 crc kubenswrapper[4693]: I1008 07:35:40.285278 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-69qw5"] Oct 08 07:35:40 crc kubenswrapper[4693]: W1008 07:35:40.302169 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7338453_7794_4bcd_8204_a189c93b7606.slice/crio-980f668553fc4574da82a650d1afacb4773a771469aba751ccaf89519e59f2eb WatchSource:0}: Error finding container 980f668553fc4574da82a650d1afacb4773a771469aba751ccaf89519e59f2eb: Status 404 returned error can't find the container with id 980f668553fc4574da82a650d1afacb4773a771469aba751ccaf89519e59f2eb Oct 08 07:35:40 crc kubenswrapper[4693]: I1008 07:35:40.305932 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-pp99v" event={"ID":"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d","Type":"ContainerStarted","Data":"b77adafe28931ba499f21cd52f784a5e97252fbad0dbe0f6bb6ff3b6e4d5debe"} Oct 08 07:35:40 crc kubenswrapper[4693]: I1008 07:35:40.308447 4693 generic.go:334] "Generic (PLEG): container finished" podID="15bacf5c-3c15-48c3-9c6c-478bd9e5d599" containerID="cea13c521cc757b8866c612a37cc18e1619ac54e843be832de860c6b2e0b6114" exitCode=0 Oct 08 07:35:40 crc kubenswrapper[4693]: I1008 07:35:40.308525 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" event={"ID":"15bacf5c-3c15-48c3-9c6c-478bd9e5d599","Type":"ContainerDied","Data":"cea13c521cc757b8866c612a37cc18e1619ac54e843be832de860c6b2e0b6114"} Oct 08 07:35:40 crc kubenswrapper[4693]: I1008 07:35:40.308587 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" event={"ID":"15bacf5c-3c15-48c3-9c6c-478bd9e5d599","Type":"ContainerStarted","Data":"44fd18740f24164332bed42b1f1072a4557bc3f054870d313d04650719534754"} Oct 08 07:35:40 crc kubenswrapper[4693]: I1008 07:35:40.311413 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2133c260-e214-428f-8f00-92d29d84594e","Type":"ContainerStarted","Data":"ccd515f86f6307530399aa33d74588bf93202a65cc4a1dd8116303f3f0a912f2"} Oct 08 07:35:40 crc kubenswrapper[4693]: I1008 07:35:40.315950 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0b3434ec-c2c8-47cf-a2e4-dc61725564d7","Type":"ContainerStarted","Data":"5308ffe4486b2969e831b82f5207a57ac28f4f64085b9ff008d8b95d3ebb2b0d"} Oct 08 07:35:40 crc kubenswrapper[4693]: I1008 07:35:40.322455 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"936e36b3-6be5-4b41-9473-08474ea5443c","Type":"ContainerStarted","Data":"662a97075996d687c3f3e9cf57adbddbcbef4829514d99b2217715832b1537ce"} Oct 08 07:35:40 crc kubenswrapper[4693]: I1008 07:35:40.324117 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7ae77465-4a61-4432-88fc-e0ac674efbf7","Type":"ContainerStarted","Data":"5bd66baeaefc848853aa8832a7415d50c7f99b0611df4bc8b63ea236d2aa3cc1"} Oct 08 07:35:40 crc kubenswrapper[4693]: I1008 07:35:40.328444 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell0-cell-mapping-pp99v" podStartSLOduration=2.328423654 podStartE2EDuration="2.328423654s" podCreationTimestamp="2025-10-08 07:35:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:35:40.321715268 +0000 UTC m=+1125.692680203" watchObservedRunningTime="2025-10-08 07:35:40.328423654 +0000 UTC m=+1125.699388609" Oct 08 07:35:41 crc kubenswrapper[4693]: I1008 07:35:41.337220 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-69qw5" event={"ID":"c7338453-7794-4bcd-8204-a189c93b7606","Type":"ContainerStarted","Data":"4a42a01382abaa6f0eff26fd468b5aaf43f96272176431a500b6c92b5123b8be"} Oct 08 07:35:41 crc kubenswrapper[4693]: I1008 07:35:41.337597 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-69qw5" event={"ID":"c7338453-7794-4bcd-8204-a189c93b7606","Type":"ContainerStarted","Data":"980f668553fc4574da82a650d1afacb4773a771469aba751ccaf89519e59f2eb"} Oct 08 07:35:41 crc kubenswrapper[4693]: I1008 07:35:41.342914 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" event={"ID":"15bacf5c-3c15-48c3-9c6c-478bd9e5d599","Type":"ContainerStarted","Data":"58dc5fcdcafbdc23a9631b266d6eb65688b3011c23f7ebc8c76fdfcfe67b74ab"} Oct 08 07:35:41 crc kubenswrapper[4693]: I1008 07:35:41.342972 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:41 crc kubenswrapper[4693]: I1008 07:35:41.360604 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-69qw5" podStartSLOduration=2.360582239 podStartE2EDuration="2.360582239s" podCreationTimestamp="2025-10-08 07:35:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:35:41.353424284 +0000 UTC m=+1126.724389259" watchObservedRunningTime="2025-10-08 07:35:41.360582239 +0000 UTC m=+1126.731547174" Oct 08 07:35:41 crc kubenswrapper[4693]: I1008 07:35:41.380737 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" podStartSLOduration=3.380717005 podStartE2EDuration="3.380717005s" podCreationTimestamp="2025-10-08 07:35:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:35:41.370237883 +0000 UTC m=+1126.741202818" watchObservedRunningTime="2025-10-08 07:35:41.380717005 +0000 UTC m=+1126.751681940" Oct 08 07:35:41 crc kubenswrapper[4693]: I1008 07:35:41.975879 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:41 crc kubenswrapper[4693]: I1008 07:35:41.997883 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.387134 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0b3434ec-c2c8-47cf-a2e4-dc61725564d7","Type":"ContainerStarted","Data":"9c9df325479f33b98f1bd9ef16397c253b77e71fc52d512daa665b9ae419a7c9"} Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.387626 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"0b3434ec-c2c8-47cf-a2e4-dc61725564d7","Type":"ContainerStarted","Data":"ed18afc701feb69fe62c60af4cacc371b91e159357d41d3301318be9d2e67554"} Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.399373 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"936e36b3-6be5-4b41-9473-08474ea5443c","Type":"ContainerStarted","Data":"c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0"} Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.399503 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="936e36b3-6be5-4b41-9473-08474ea5443c" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0" gracePeriod=30 Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.403467 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7ae77465-4a61-4432-88fc-e0ac674efbf7","Type":"ContainerStarted","Data":"ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b"} Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.403496 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7ae77465-4a61-4432-88fc-e0ac674efbf7","Type":"ContainerStarted","Data":"0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193"} Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.403594 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7ae77465-4a61-4432-88fc-e0ac674efbf7" containerName="nova-metadata-log" containerID="cri-o://0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193" gracePeriod=30 Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.403661 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7ae77465-4a61-4432-88fc-e0ac674efbf7" containerName="nova-metadata-metadata" containerID="cri-o://ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b" gracePeriod=30 Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.406401 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2133c260-e214-428f-8f00-92d29d84594e","Type":"ContainerStarted","Data":"7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb"} Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.415542 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.288320225 podStartE2EDuration="5.415516042s" podCreationTimestamp="2025-10-08 07:35:38 +0000 UTC" firstStartedPulling="2025-10-08 07:35:39.32609543 +0000 UTC m=+1124.697060365" lastFinishedPulling="2025-10-08 07:35:42.453291247 +0000 UTC m=+1127.824256182" observedRunningTime="2025-10-08 07:35:43.410007375 +0000 UTC m=+1128.780972310" watchObservedRunningTime="2025-10-08 07:35:43.415516042 +0000 UTC m=+1128.786480977" Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.433146 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.650585241 podStartE2EDuration="5.43312606s" podCreationTimestamp="2025-10-08 07:35:38 +0000 UTC" firstStartedPulling="2025-10-08 07:35:39.664459332 +0000 UTC m=+1125.035424257" lastFinishedPulling="2025-10-08 07:35:42.447000121 +0000 UTC m=+1127.817965076" observedRunningTime="2025-10-08 
07:35:43.426997918 +0000 UTC m=+1128.797962853" watchObservedRunningTime="2025-10-08 07:35:43.43312606 +0000 UTC m=+1128.804090985" Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.449856 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.771721295 podStartE2EDuration="5.449764845s" podCreationTimestamp="2025-10-08 07:35:38 +0000 UTC" firstStartedPulling="2025-10-08 07:35:39.770986848 +0000 UTC m=+1125.141951783" lastFinishedPulling="2025-10-08 07:35:42.449030398 +0000 UTC m=+1127.819995333" observedRunningTime="2025-10-08 07:35:43.44134363 +0000 UTC m=+1128.812308555" watchObservedRunningTime="2025-10-08 07:35:43.449764845 +0000 UTC m=+1128.820729770" Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.466996 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.423908774 podStartE2EDuration="5.466980664s" podCreationTimestamp="2025-10-08 07:35:38 +0000 UTC" firstStartedPulling="2025-10-08 07:35:39.405987699 +0000 UTC m=+1124.776952634" lastFinishedPulling="2025-10-08 07:35:42.449059589 +0000 UTC m=+1127.820024524" observedRunningTime="2025-10-08 07:35:43.464634029 +0000 UTC m=+1128.835598964" watchObservedRunningTime="2025-10-08 07:35:43.466980664 +0000 UTC m=+1128.837945599" Oct 08 07:35:43 crc kubenswrapper[4693]: E1008 07:35:43.478515 4693 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ae77465_4a61_4432_88fc_e0ac674efbf7.slice/crio-0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193.scope\": RecentStats: unable to find data in memory cache]" Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.821832 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 08 07:35:43 crc kubenswrapper[4693]: I1008 07:35:43.822198 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.056155 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.066779 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.076931 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.203146 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae77465-4a61-4432-88fc-e0ac674efbf7-combined-ca-bundle\") pod \"7ae77465-4a61-4432-88fc-e0ac674efbf7\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.203291 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rn95v\" (UniqueName: \"kubernetes.io/projected/7ae77465-4a61-4432-88fc-e0ac674efbf7-kube-api-access-rn95v\") pod \"7ae77465-4a61-4432-88fc-e0ac674efbf7\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.203331 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ae77465-4a61-4432-88fc-e0ac674efbf7-logs\") pod \"7ae77465-4a61-4432-88fc-e0ac674efbf7\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.203381 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae77465-4a61-4432-88fc-e0ac674efbf7-config-data\") pod \"7ae77465-4a61-4432-88fc-e0ac674efbf7\" (UID: \"7ae77465-4a61-4432-88fc-e0ac674efbf7\") " Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.203740 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ae77465-4a61-4432-88fc-e0ac674efbf7-logs" (OuterVolumeSpecName: "logs") pod "7ae77465-4a61-4432-88fc-e0ac674efbf7" (UID: "7ae77465-4a61-4432-88fc-e0ac674efbf7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.204379 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ae77465-4a61-4432-88fc-e0ac674efbf7-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.209198 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ae77465-4a61-4432-88fc-e0ac674efbf7-kube-api-access-rn95v" (OuterVolumeSpecName: "kube-api-access-rn95v") pod "7ae77465-4a61-4432-88fc-e0ac674efbf7" (UID: "7ae77465-4a61-4432-88fc-e0ac674efbf7"). InnerVolumeSpecName "kube-api-access-rn95v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.234947 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ae77465-4a61-4432-88fc-e0ac674efbf7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ae77465-4a61-4432-88fc-e0ac674efbf7" (UID: "7ae77465-4a61-4432-88fc-e0ac674efbf7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.241156 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ae77465-4a61-4432-88fc-e0ac674efbf7-config-data" (OuterVolumeSpecName: "config-data") pod "7ae77465-4a61-4432-88fc-e0ac674efbf7" (UID: "7ae77465-4a61-4432-88fc-e0ac674efbf7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.306380 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae77465-4a61-4432-88fc-e0ac674efbf7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.306428 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rn95v\" (UniqueName: \"kubernetes.io/projected/7ae77465-4a61-4432-88fc-e0ac674efbf7-kube-api-access-rn95v\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.306445 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae77465-4a61-4432-88fc-e0ac674efbf7-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.416348 4693 generic.go:334] "Generic (PLEG): container finished" podID="7ae77465-4a61-4432-88fc-e0ac674efbf7" containerID="ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b" exitCode=0 Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.416667 4693 generic.go:334] "Generic (PLEG): container finished" podID="7ae77465-4a61-4432-88fc-e0ac674efbf7" containerID="0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193" exitCode=143 Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.416416 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7ae77465-4a61-4432-88fc-e0ac674efbf7","Type":"ContainerDied","Data":"ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b"} Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.416414 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.416739 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7ae77465-4a61-4432-88fc-e0ac674efbf7","Type":"ContainerDied","Data":"0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193"} Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.416757 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7ae77465-4a61-4432-88fc-e0ac674efbf7","Type":"ContainerDied","Data":"5bd66baeaefc848853aa8832a7415d50c7f99b0611df4bc8b63ea236d2aa3cc1"} Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.416772 4693 scope.go:117] "RemoveContainer" containerID="ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.441672 4693 scope.go:117] "RemoveContainer" containerID="0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.467084 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.468147 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.468596 4693 scope.go:117] "RemoveContainer" containerID="ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b" Oct 08 07:35:44 crc kubenswrapper[4693]: E1008 07:35:44.470607 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b\": container with ID starting with ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b not found: ID does not exist" containerID="ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.470651 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b"} err="failed to get container status \"ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b\": rpc error: code = NotFound desc = could not find container \"ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b\": container with ID starting with ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b not found: ID does not exist" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.470681 4693 scope.go:117] "RemoveContainer" containerID="0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193" Oct 08 07:35:44 crc kubenswrapper[4693]: E1008 07:35:44.471020 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193\": container with ID starting with 0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193 not found: ID does not exist" containerID="0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.471051 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193"} err="failed to get container status \"0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193\": rpc error: code = 
NotFound desc = could not find container \"0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193\": container with ID starting with 0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193 not found: ID does not exist" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.471066 4693 scope.go:117] "RemoveContainer" containerID="ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.471767 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b"} err="failed to get container status \"ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b\": rpc error: code = NotFound desc = could not find container \"ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b\": container with ID starting with ee21ca3178aa311143997a5c4039047d39311bdce47571b2f149e2cff9e0d37b not found: ID does not exist" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.471792 4693 scope.go:117] "RemoveContainer" containerID="0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.472071 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193"} err="failed to get container status \"0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193\": rpc error: code = NotFound desc = could not find container \"0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193\": container with ID starting with 0051a883b6e27bbdec8fe80720c5966b7b50ecf508ae8d397b15d867dd7eb193 not found: ID does not exist" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.482476 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:44 crc kubenswrapper[4693]: E1008 07:35:44.488531 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ae77465-4a61-4432-88fc-e0ac674efbf7" containerName="nova-metadata-metadata" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.488571 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ae77465-4a61-4432-88fc-e0ac674efbf7" containerName="nova-metadata-metadata" Oct 08 07:35:44 crc kubenswrapper[4693]: E1008 07:35:44.488591 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ae77465-4a61-4432-88fc-e0ac674efbf7" containerName="nova-metadata-log" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.488601 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ae77465-4a61-4432-88fc-e0ac674efbf7" containerName="nova-metadata-log" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.488904 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ae77465-4a61-4432-88fc-e0ac674efbf7" containerName="nova-metadata-log" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.488938 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ae77465-4a61-4432-88fc-e0ac674efbf7" containerName="nova-metadata-metadata" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.490234 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.495296 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.495317 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.509050 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.612400 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.612458 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p25kr\" (UniqueName: \"kubernetes.io/projected/fcf290a6-a971-4dbe-8b49-2898d7f449d2-kube-api-access-p25kr\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.612641 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.612847 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-config-data\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.612912 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcf290a6-a971-4dbe-8b49-2898d7f449d2-logs\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.714737 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-config-data\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.714803 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcf290a6-a971-4dbe-8b49-2898d7f449d2-logs\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.714941 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 
07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.714981 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p25kr\" (UniqueName: \"kubernetes.io/projected/fcf290a6-a971-4dbe-8b49-2898d7f449d2-kube-api-access-p25kr\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.715056 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.715693 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcf290a6-a971-4dbe-8b49-2898d7f449d2-logs\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.719058 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.720999 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.721728 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-config-data\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.737371 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p25kr\" (UniqueName: \"kubernetes.io/projected/fcf290a6-a971-4dbe-8b49-2898d7f449d2-kube-api-access-p25kr\") pod \"nova-metadata-0\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " pod="openstack/nova-metadata-0" Oct 08 07:35:44 crc kubenswrapper[4693]: I1008 07:35:44.846466 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:35:45 crc kubenswrapper[4693]: W1008 07:35:45.328773 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfcf290a6_a971_4dbe_8b49_2898d7f449d2.slice/crio-d25b90932535029ebca8659e44b7efee26ed7f3904e37a0fd6b991cbc20d9408 WatchSource:0}: Error finding container d25b90932535029ebca8659e44b7efee26ed7f3904e37a0fd6b991cbc20d9408: Status 404 returned error can't find the container with id d25b90932535029ebca8659e44b7efee26ed7f3904e37a0fd6b991cbc20d9408 Oct 08 07:35:45 crc kubenswrapper[4693]: I1008 07:35:45.332019 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:45 crc kubenswrapper[4693]: I1008 07:35:45.382665 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ae77465-4a61-4432-88fc-e0ac674efbf7" path="/var/lib/kubelet/pods/7ae77465-4a61-4432-88fc-e0ac674efbf7/volumes" Oct 08 07:35:45 crc kubenswrapper[4693]: I1008 07:35:45.431181 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fcf290a6-a971-4dbe-8b49-2898d7f449d2","Type":"ContainerStarted","Data":"d25b90932535029ebca8659e44b7efee26ed7f3904e37a0fd6b991cbc20d9408"} Oct 08 07:35:46 crc kubenswrapper[4693]: I1008 07:35:46.442133 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fcf290a6-a971-4dbe-8b49-2898d7f449d2","Type":"ContainerStarted","Data":"10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f"} Oct 08 07:35:46 crc kubenswrapper[4693]: I1008 07:35:46.442378 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fcf290a6-a971-4dbe-8b49-2898d7f449d2","Type":"ContainerStarted","Data":"e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f"} Oct 08 07:35:46 crc kubenswrapper[4693]: I1008 07:35:46.475580 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.475553755 podStartE2EDuration="2.475553755s" podCreationTimestamp="2025-10-08 07:35:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:35:46.469115906 +0000 UTC m=+1131.840080841" watchObservedRunningTime="2025-10-08 07:35:46.475553755 +0000 UTC m=+1131.846518710" Oct 08 07:35:47 crc kubenswrapper[4693]: I1008 07:35:47.461073 4693 generic.go:334] "Generic (PLEG): container finished" podID="f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d" containerID="b77adafe28931ba499f21cd52f784a5e97252fbad0dbe0f6bb6ff3b6e4d5debe" exitCode=0 Oct 08 07:35:47 crc kubenswrapper[4693]: I1008 07:35:47.461192 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-pp99v" event={"ID":"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d","Type":"ContainerDied","Data":"b77adafe28931ba499f21cd52f784a5e97252fbad0dbe0f6bb6ff3b6e4d5debe"} Oct 08 07:35:47 crc kubenswrapper[4693]: I1008 07:35:47.464874 4693 generic.go:334] "Generic (PLEG): container finished" podID="c7338453-7794-4bcd-8204-a189c93b7606" containerID="4a42a01382abaa6f0eff26fd468b5aaf43f96272176431a500b6c92b5123b8be" exitCode=0 Oct 08 07:35:47 crc kubenswrapper[4693]: I1008 07:35:47.464962 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-69qw5" 
event={"ID":"c7338453-7794-4bcd-8204-a189c93b7606","Type":"ContainerDied","Data":"4a42a01382abaa6f0eff26fd468b5aaf43f96272176431a500b6c92b5123b8be"} Oct 08 07:35:48 crc kubenswrapper[4693]: I1008 07:35:48.768432 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 08 07:35:48 crc kubenswrapper[4693]: I1008 07:35:48.768951 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 08 07:35:48 crc kubenswrapper[4693]: I1008 07:35:48.962087 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:48 crc kubenswrapper[4693]: I1008 07:35:48.970414 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.004509 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-config-data\") pod \"c7338453-7794-4bcd-8204-a189c93b7606\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.004614 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-scripts\") pod \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.004730 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64l87\" (UniqueName: \"kubernetes.io/projected/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-kube-api-access-64l87\") pod \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.004762 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-combined-ca-bundle\") pod \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.004802 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-config-data\") pod \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\" (UID: \"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.004868 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-psfns\" (UniqueName: \"kubernetes.io/projected/c7338453-7794-4bcd-8204-a189c93b7606-kube-api-access-psfns\") pod \"c7338453-7794-4bcd-8204-a189c93b7606\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.004915 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-scripts\") pod \"c7338453-7794-4bcd-8204-a189c93b7606\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.004944 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-combined-ca-bundle\") pod \"c7338453-7794-4bcd-8204-a189c93b7606\" (UID: \"c7338453-7794-4bcd-8204-a189c93b7606\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.017658 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7338453-7794-4bcd-8204-a189c93b7606-kube-api-access-psfns" (OuterVolumeSpecName: "kube-api-access-psfns") pod "c7338453-7794-4bcd-8204-a189c93b7606" (UID: "c7338453-7794-4bcd-8204-a189c93b7606"). InnerVolumeSpecName "kube-api-access-psfns". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.018537 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-scripts" (OuterVolumeSpecName: "scripts") pod "c7338453-7794-4bcd-8204-a189c93b7606" (UID: "c7338453-7794-4bcd-8204-a189c93b7606"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.033143 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-kube-api-access-64l87" (OuterVolumeSpecName: "kube-api-access-64l87") pod "f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d" (UID: "f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d"). InnerVolumeSpecName "kube-api-access-64l87". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.033280 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-scripts" (OuterVolumeSpecName: "scripts") pod "f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d" (UID: "f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.055730 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-config-data" (OuterVolumeSpecName: "config-data") pod "f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d" (UID: "f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.057439 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.060170 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d" (UID: "f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.060452 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-config-data" (OuterVolumeSpecName: "config-data") pod "c7338453-7794-4bcd-8204-a189c93b7606" (UID: "c7338453-7794-4bcd-8204-a189c93b7606"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.079877 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7338453-7794-4bcd-8204-a189c93b7606" (UID: "c7338453-7794-4bcd-8204-a189c93b7606"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.084067 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.102179 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.111978 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.112009 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.112020 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7338453-7794-4bcd-8204-a189c93b7606-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.112030 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.112039 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64l87\" (UniqueName: \"kubernetes.io/projected/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-kube-api-access-64l87\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.112061 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.112070 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.112078 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-psfns\" (UniqueName: \"kubernetes.io/projected/c7338453-7794-4bcd-8204-a189c93b7606-kube-api-access-psfns\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.163504 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-djbzk"] Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.163739 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" podUID="17141a5d-2e95-4670-9852-ef9ba4e6fb77" containerName="dnsmasq-dns" containerID="cri-o://e276d37c9a586c2227b56ecf8d80f65d6b6a4df5947a099b1d46c79b7e047c16" gracePeriod=10 Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 
07:35:49.489620 4693 generic.go:334] "Generic (PLEG): container finished" podID="17141a5d-2e95-4670-9852-ef9ba4e6fb77" containerID="e276d37c9a586c2227b56ecf8d80f65d6b6a4df5947a099b1d46c79b7e047c16" exitCode=0 Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.489711 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" event={"ID":"17141a5d-2e95-4670-9852-ef9ba4e6fb77","Type":"ContainerDied","Data":"e276d37c9a586c2227b56ecf8d80f65d6b6a4df5947a099b1d46c79b7e047c16"} Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.491945 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-pp99v" event={"ID":"f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d","Type":"ContainerDied","Data":"9544d80c7822fe96f5867b5630022d97e290cadf32b0d65c3b068d9ab82a9575"} Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.492007 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9544d80c7822fe96f5867b5630022d97e290cadf32b0d65c3b068d9ab82a9575" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.492079 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-pp99v" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.494791 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-69qw5" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.494937 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-69qw5" event={"ID":"c7338453-7794-4bcd-8204-a189c93b7606","Type":"ContainerDied","Data":"980f668553fc4574da82a650d1afacb4773a771469aba751ccaf89519e59f2eb"} Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.494977 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="980f668553fc4574da82a650d1afacb4773a771469aba751ccaf89519e59f2eb" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.549369 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.551993 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.595016 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 08 07:35:49 crc kubenswrapper[4693]: E1008 07:35:49.595543 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d" containerName="nova-manage" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.595627 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d" containerName="nova-manage" Oct 08 07:35:49 crc kubenswrapper[4693]: E1008 07:35:49.595725 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7338453-7794-4bcd-8204-a189c93b7606" containerName="nova-cell1-conductor-db-sync" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.595776 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7338453-7794-4bcd-8204-a189c93b7606" containerName="nova-cell1-conductor-db-sync" Oct 08 07:35:49 crc kubenswrapper[4693]: E1008 07:35:49.595852 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17141a5d-2e95-4670-9852-ef9ba4e6fb77" containerName="init" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.595912 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="17141a5d-2e95-4670-9852-ef9ba4e6fb77" containerName="init" Oct 08 07:35:49 crc kubenswrapper[4693]: E1008 07:35:49.595967 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17141a5d-2e95-4670-9852-ef9ba4e6fb77" containerName="dnsmasq-dns" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.596014 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="17141a5d-2e95-4670-9852-ef9ba4e6fb77" containerName="dnsmasq-dns" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.596244 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="17141a5d-2e95-4670-9852-ef9ba4e6fb77" containerName="dnsmasq-dns" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.596316 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d" containerName="nova-manage" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.596377 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7338453-7794-4bcd-8204-a189c93b7606" containerName="nova-cell1-conductor-db-sync" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.597005 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.599383 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.619775 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-config\") pod \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.619848 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-dns-swift-storage-0\") pod \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.619943 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-ovsdbserver-nb\") pod \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.619970 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-dns-svc\") pod \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.620043 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49slt\" (UniqueName: \"kubernetes.io/projected/17141a5d-2e95-4670-9852-ef9ba4e6fb77-kube-api-access-49slt\") pod \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.620107 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-ovsdbserver-sb\") pod \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\" (UID: \"17141a5d-2e95-4670-9852-ef9ba4e6fb77\") " Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.637121 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17141a5d-2e95-4670-9852-ef9ba4e6fb77-kube-api-access-49slt" (OuterVolumeSpecName: "kube-api-access-49slt") pod "17141a5d-2e95-4670-9852-ef9ba4e6fb77" (UID: "17141a5d-2e95-4670-9852-ef9ba4e6fb77"). InnerVolumeSpecName "kube-api-access-49slt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.644079 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.678663 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "17141a5d-2e95-4670-9852-ef9ba4e6fb77" (UID: "17141a5d-2e95-4670-9852-ef9ba4e6fb77"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.685758 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "17141a5d-2e95-4670-9852-ef9ba4e6fb77" (UID: "17141a5d-2e95-4670-9852-ef9ba4e6fb77"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.686919 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-config" (OuterVolumeSpecName: "config") pod "17141a5d-2e95-4670-9852-ef9ba4e6fb77" (UID: "17141a5d-2e95-4670-9852-ef9ba4e6fb77"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.704523 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "17141a5d-2e95-4670-9852-ef9ba4e6fb77" (UID: "17141a5d-2e95-4670-9852-ef9ba4e6fb77"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.705372 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "17141a5d-2e95-4670-9852-ef9ba4e6fb77" (UID: "17141a5d-2e95-4670-9852-ef9ba4e6fb77"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.722298 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5x7gt\" (UniqueName: \"kubernetes.io/projected/2b15b479-b3e9-4af4-bb60-3f6ca0ed053e-kube-api-access-5x7gt\") pod \"nova-cell1-conductor-0\" (UID: \"2b15b479-b3e9-4af4-bb60-3f6ca0ed053e\") " pod="openstack/nova-cell1-conductor-0" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.722368 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b15b479-b3e9-4af4-bb60-3f6ca0ed053e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2b15b479-b3e9-4af4-bb60-3f6ca0ed053e\") " pod="openstack/nova-cell1-conductor-0" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.722390 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b15b479-b3e9-4af4-bb60-3f6ca0ed053e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2b15b479-b3e9-4af4-bb60-3f6ca0ed053e\") " pod="openstack/nova-cell1-conductor-0" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.722448 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49slt\" (UniqueName: \"kubernetes.io/projected/17141a5d-2e95-4670-9852-ef9ba4e6fb77-kube-api-access-49slt\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.722459 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 08 
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.722478 4693 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.722486 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.722495 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17141a5d-2e95-4670-9852-ef9ba4e6fb77-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.724748 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.725023 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" containerName="nova-api-log" containerID="cri-o://ed18afc701feb69fe62c60af4cacc371b91e159357d41d3301318be9d2e67554" gracePeriod=30
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.725315 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" containerName="nova-api-api" containerID="cri-o://9c9df325479f33b98f1bd9ef16397c253b77e71fc52d512daa665b9ae419a7c9" gracePeriod=30
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.732923 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": EOF"
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.732938 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": EOF"
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.744257 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.744481 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fcf290a6-a971-4dbe-8b49-2898d7f449d2" containerName="nova-metadata-log" containerID="cri-o://e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f" gracePeriod=30
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.744614 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fcf290a6-a971-4dbe-8b49-2898d7f449d2" containerName="nova-metadata-metadata" containerID="cri-o://10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f" gracePeriod=30
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.824225 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5x7gt\" (UniqueName: \"kubernetes.io/projected/2b15b479-b3e9-4af4-bb60-3f6ca0ed053e-kube-api-access-5x7gt\") pod \"nova-cell1-conductor-0\" (UID: \"2b15b479-b3e9-4af4-bb60-3f6ca0ed053e\") " pod="openstack/nova-cell1-conductor-0"
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.824310 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b15b479-b3e9-4af4-bb60-3f6ca0ed053e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2b15b479-b3e9-4af4-bb60-3f6ca0ed053e\") " pod="openstack/nova-cell1-conductor-0"
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.824332 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b15b479-b3e9-4af4-bb60-3f6ca0ed053e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2b15b479-b3e9-4af4-bb60-3f6ca0ed053e\") " pod="openstack/nova-cell1-conductor-0"
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.827565 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b15b479-b3e9-4af4-bb60-3f6ca0ed053e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2b15b479-b3e9-4af4-bb60-3f6ca0ed053e\") " pod="openstack/nova-cell1-conductor-0"
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.827774 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b15b479-b3e9-4af4-bb60-3f6ca0ed053e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2b15b479-b3e9-4af4-bb60-3f6ca0ed053e\") " pod="openstack/nova-cell1-conductor-0"
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.843271 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5x7gt\" (UniqueName: \"kubernetes.io/projected/2b15b479-b3e9-4af4-bb60-3f6ca0ed053e-kube-api-access-5x7gt\") pod \"nova-cell1-conductor-0\" (UID: \"2b15b479-b3e9-4af4-bb60-3f6ca0ed053e\") " pod="openstack/nova-cell1-conductor-0"
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.846872 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.846912 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 08 07:35:49 crc kubenswrapper[4693]: I1008 07:35:49.919905 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.054515 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.382911 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.387021 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.445768 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-nova-metadata-tls-certs\") pod \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.445962 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-combined-ca-bundle\") pod \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.446072 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-config-data\") pod \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.446101 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p25kr\" (UniqueName: \"kubernetes.io/projected/fcf290a6-a971-4dbe-8b49-2898d7f449d2-kube-api-access-p25kr\") pod \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.446206 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcf290a6-a971-4dbe-8b49-2898d7f449d2-logs\") pod \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\" (UID: \"fcf290a6-a971-4dbe-8b49-2898d7f449d2\") " Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.447333 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcf290a6-a971-4dbe-8b49-2898d7f449d2-logs" (OuterVolumeSpecName: "logs") pod "fcf290a6-a971-4dbe-8b49-2898d7f449d2" (UID: "fcf290a6-a971-4dbe-8b49-2898d7f449d2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.449716 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcf290a6-a971-4dbe-8b49-2898d7f449d2-kube-api-access-p25kr" (OuterVolumeSpecName: "kube-api-access-p25kr") pod "fcf290a6-a971-4dbe-8b49-2898d7f449d2" (UID: "fcf290a6-a971-4dbe-8b49-2898d7f449d2"). InnerVolumeSpecName "kube-api-access-p25kr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.477943 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fcf290a6-a971-4dbe-8b49-2898d7f449d2" (UID: "fcf290a6-a971-4dbe-8b49-2898d7f449d2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.481090 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-config-data" (OuterVolumeSpecName: "config-data") pod "fcf290a6-a971-4dbe-8b49-2898d7f449d2" (UID: "fcf290a6-a971-4dbe-8b49-2898d7f449d2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.515142 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "fcf290a6-a971-4dbe-8b49-2898d7f449d2" (UID: "fcf290a6-a971-4dbe-8b49-2898d7f449d2"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.522708 4693 generic.go:334] "Generic (PLEG): container finished" podID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" containerID="ed18afc701feb69fe62c60af4cacc371b91e159357d41d3301318be9d2e67554" exitCode=143 Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.522760 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0b3434ec-c2c8-47cf-a2e4-dc61725564d7","Type":"ContainerDied","Data":"ed18afc701feb69fe62c60af4cacc371b91e159357d41d3301318be9d2e67554"} Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.525948 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" event={"ID":"17141a5d-2e95-4670-9852-ef9ba4e6fb77","Type":"ContainerDied","Data":"0513da7ac1d68f8baac8bb49f5df1f5a14e91fa89bbf04bbf71856b753002fae"} Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.526066 4693 scope.go:117] "RemoveContainer" containerID="e276d37c9a586c2227b56ecf8d80f65d6b6a4df5947a099b1d46c79b7e047c16" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.526303 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-djbzk" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.535716 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2b15b479-b3e9-4af4-bb60-3f6ca0ed053e","Type":"ContainerStarted","Data":"8c04099943995034796ffc4c9fe02c2e304bdae68c90ebbc7090f5eff63dd90c"} Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.545486 4693 generic.go:334] "Generic (PLEG): container finished" podID="fcf290a6-a971-4dbe-8b49-2898d7f449d2" containerID="10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f" exitCode=0 Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.545518 4693 generic.go:334] "Generic (PLEG): container finished" podID="fcf290a6-a971-4dbe-8b49-2898d7f449d2" containerID="e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f" exitCode=143 Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.545531 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.545535 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fcf290a6-a971-4dbe-8b49-2898d7f449d2","Type":"ContainerDied","Data":"10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f"} Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.545570 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fcf290a6-a971-4dbe-8b49-2898d7f449d2","Type":"ContainerDied","Data":"e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f"} Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.545586 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fcf290a6-a971-4dbe-8b49-2898d7f449d2","Type":"ContainerDied","Data":"d25b90932535029ebca8659e44b7efee26ed7f3904e37a0fd6b991cbc20d9408"} Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.547892 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.547910 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.547919 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p25kr\" (UniqueName: \"kubernetes.io/projected/fcf290a6-a971-4dbe-8b49-2898d7f449d2-kube-api-access-p25kr\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.547928 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcf290a6-a971-4dbe-8b49-2898d7f449d2-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.547936 4693 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fcf290a6-a971-4dbe-8b49-2898d7f449d2-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.564091 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-djbzk"] Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.570621 4693 scope.go:117] "RemoveContainer" containerID="ada8cadfe9172ba72d823ca5bba6db93adb347eb8642aac1c9df98bc9f08398a" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.573637 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-djbzk"] Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.604281 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.604594 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.615561 4693 scope.go:117] "RemoveContainer" containerID="10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.627662 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:50 crc kubenswrapper[4693]: E1008 07:35:50.628057 4693 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="fcf290a6-a971-4dbe-8b49-2898d7f449d2" containerName="nova-metadata-metadata" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.628074 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf290a6-a971-4dbe-8b49-2898d7f449d2" containerName="nova-metadata-metadata" Oct 08 07:35:50 crc kubenswrapper[4693]: E1008 07:35:50.628106 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf290a6-a971-4dbe-8b49-2898d7f449d2" containerName="nova-metadata-log" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.628113 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf290a6-a971-4dbe-8b49-2898d7f449d2" containerName="nova-metadata-log" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.628268 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcf290a6-a971-4dbe-8b49-2898d7f449d2" containerName="nova-metadata-log" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.628294 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcf290a6-a971-4dbe-8b49-2898d7f449d2" containerName="nova-metadata-metadata" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.629247 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.631698 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.635545 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.638420 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.664745 4693 scope.go:117] "RemoveContainer" containerID="e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.683444 4693 scope.go:117] "RemoveContainer" containerID="10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f" Oct 08 07:35:50 crc kubenswrapper[4693]: E1008 07:35:50.683742 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f\": container with ID starting with 10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f not found: ID does not exist" containerID="10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.683768 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f"} err="failed to get container status \"10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f\": rpc error: code = NotFound desc = could not find container \"10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f\": container with ID starting with 10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f not found: ID does not exist" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.683787 4693 scope.go:117] "RemoveContainer" containerID="e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f" Oct 08 07:35:50 crc kubenswrapper[4693]: E1008 07:35:50.684028 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f\": container with ID starting with e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f not found: ID does not exist" containerID="e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.684050 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f"} err="failed to get container status \"e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f\": rpc error: code = NotFound desc = could not find container \"e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f\": container with ID starting with e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f not found: ID does not exist" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.684064 4693 scope.go:117] "RemoveContainer" containerID="10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.684289 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f"} err="failed to get container status \"10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f\": rpc error: code = NotFound desc = could not find container \"10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f\": container with ID starting with 10e2b6d7e77f21d0e1e23d0aa316a3c7a56b188136b10d90f7d1d8bf3553be5f not found: ID does not exist" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.684346 4693 scope.go:117] "RemoveContainer" containerID="e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.684679 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f"} err="failed to get container status \"e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f\": rpc error: code = NotFound desc = could not find container \"e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f\": container with ID starting with e6eaf763164ffea95ac502d06b3e16e17f00a2a1f69e9d29c44326baf63f466f not found: ID does not exist" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.751640 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/065b63c2-611d-4461-bc50-4cfb9e120bba-logs\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.751907 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-config-data\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.752098 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 
07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.752181 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srtxw\" (UniqueName: \"kubernetes.io/projected/065b63c2-611d-4461-bc50-4cfb9e120bba-kube-api-access-srtxw\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.752283 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.853999 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.854048 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srtxw\" (UniqueName: \"kubernetes.io/projected/065b63c2-611d-4461-bc50-4cfb9e120bba-kube-api-access-srtxw\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.854087 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.854110 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/065b63c2-611d-4461-bc50-4cfb9e120bba-logs\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.854130 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-config-data\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.855232 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/065b63c2-611d-4461-bc50-4cfb9e120bba-logs\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.858792 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-config-data\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.859161 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-nova-metadata-tls-certs\") pod 
\"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.865775 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.873505 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srtxw\" (UniqueName: \"kubernetes.io/projected/065b63c2-611d-4461-bc50-4cfb9e120bba-kube-api-access-srtxw\") pod \"nova-metadata-0\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " pod="openstack/nova-metadata-0" Oct 08 07:35:50 crc kubenswrapper[4693]: I1008 07:35:50.944244 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:35:51 crc kubenswrapper[4693]: I1008 07:35:51.382635 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17141a5d-2e95-4670-9852-ef9ba4e6fb77" path="/var/lib/kubelet/pods/17141a5d-2e95-4670-9852-ef9ba4e6fb77/volumes" Oct 08 07:35:51 crc kubenswrapper[4693]: I1008 07:35:51.384370 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcf290a6-a971-4dbe-8b49-2898d7f449d2" path="/var/lib/kubelet/pods/fcf290a6-a971-4dbe-8b49-2898d7f449d2/volumes" Oct 08 07:35:51 crc kubenswrapper[4693]: I1008 07:35:51.397701 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:35:51 crc kubenswrapper[4693]: W1008 07:35:51.407030 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod065b63c2_611d_4461_bc50_4cfb9e120bba.slice/crio-e971e8efc62f1b956e5813ab7d124060abca0d828ed64e03c7cac2dd5815f020 WatchSource:0}: Error finding container e971e8efc62f1b956e5813ab7d124060abca0d828ed64e03c7cac2dd5815f020: Status 404 returned error can't find the container with id e971e8efc62f1b956e5813ab7d124060abca0d828ed64e03c7cac2dd5815f020 Oct 08 07:35:51 crc kubenswrapper[4693]: I1008 07:35:51.572637 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"065b63c2-611d-4461-bc50-4cfb9e120bba","Type":"ContainerStarted","Data":"e971e8efc62f1b956e5813ab7d124060abca0d828ed64e03c7cac2dd5815f020"} Oct 08 07:35:51 crc kubenswrapper[4693]: I1008 07:35:51.578221 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="2133c260-e214-428f-8f00-92d29d84594e" containerName="nova-scheduler-scheduler" containerID="cri-o://7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb" gracePeriod=30 Oct 08 07:35:51 crc kubenswrapper[4693]: I1008 07:35:51.578524 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2b15b479-b3e9-4af4-bb60-3f6ca0ed053e","Type":"ContainerStarted","Data":"38fac97bf7d23d3706633bb9980e946276f5a7ea41306fb2e349f2f3b999e6f4"} Oct 08 07:35:51 crc kubenswrapper[4693]: I1008 07:35:51.578923 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 08 07:35:51 crc kubenswrapper[4693]: I1008 07:35:51.596001 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.5959577769999997 
podStartE2EDuration="2.595957777s" podCreationTimestamp="2025-10-08 07:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:35:51.594923223 +0000 UTC m=+1136.965888168" watchObservedRunningTime="2025-10-08 07:35:51.595957777 +0000 UTC m=+1136.966922742" Oct 08 07:35:52 crc kubenswrapper[4693]: I1008 07:35:52.590130 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"065b63c2-611d-4461-bc50-4cfb9e120bba","Type":"ContainerStarted","Data":"b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff"} Oct 08 07:35:52 crc kubenswrapper[4693]: I1008 07:35:52.590861 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"065b63c2-611d-4461-bc50-4cfb9e120bba","Type":"ContainerStarted","Data":"fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63"} Oct 08 07:35:52 crc kubenswrapper[4693]: I1008 07:35:52.619224 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.6191990560000002 podStartE2EDuration="2.619199056s" podCreationTimestamp="2025-10-08 07:35:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:35:52.606772908 +0000 UTC m=+1137.977737853" watchObservedRunningTime="2025-10-08 07:35:52.619199056 +0000 UTC m=+1137.990164011" Oct 08 07:35:54 crc kubenswrapper[4693]: E1008 07:35:54.059533 4693 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 08 07:35:54 crc kubenswrapper[4693]: E1008 07:35:54.061290 4693 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb is running failed: container process not found" containerID="7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 08 07:35:54 crc kubenswrapper[4693]: E1008 07:35:54.061960 4693 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb is running failed: container process not found" containerID="7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 08 07:35:54 crc kubenswrapper[4693]: E1008 07:35:54.062051 4693 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="2133c260-e214-428f-8f00-92d29d84594e" containerName="nova-scheduler-scheduler" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.459704 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.501582 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.523552 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2133c260-e214-428f-8f00-92d29d84594e-combined-ca-bundle\") pod \"2133c260-e214-428f-8f00-92d29d84594e\" (UID: \"2133c260-e214-428f-8f00-92d29d84594e\") " Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.523672 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9rzs\" (UniqueName: \"kubernetes.io/projected/2133c260-e214-428f-8f00-92d29d84594e-kube-api-access-c9rzs\") pod \"2133c260-e214-428f-8f00-92d29d84594e\" (UID: \"2133c260-e214-428f-8f00-92d29d84594e\") " Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.523915 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2133c260-e214-428f-8f00-92d29d84594e-config-data\") pod \"2133c260-e214-428f-8f00-92d29d84594e\" (UID: \"2133c260-e214-428f-8f00-92d29d84594e\") " Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.535829 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2133c260-e214-428f-8f00-92d29d84594e-kube-api-access-c9rzs" (OuterVolumeSpecName: "kube-api-access-c9rzs") pod "2133c260-e214-428f-8f00-92d29d84594e" (UID: "2133c260-e214-428f-8f00-92d29d84594e"). InnerVolumeSpecName "kube-api-access-c9rzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.559942 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2133c260-e214-428f-8f00-92d29d84594e-config-data" (OuterVolumeSpecName: "config-data") pod "2133c260-e214-428f-8f00-92d29d84594e" (UID: "2133c260-e214-428f-8f00-92d29d84594e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.573952 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2133c260-e214-428f-8f00-92d29d84594e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2133c260-e214-428f-8f00-92d29d84594e" (UID: "2133c260-e214-428f-8f00-92d29d84594e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.612935 4693 generic.go:334] "Generic (PLEG): container finished" podID="2133c260-e214-428f-8f00-92d29d84594e" containerID="7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb" exitCode=0 Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.613015 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2133c260-e214-428f-8f00-92d29d84594e","Type":"ContainerDied","Data":"7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb"} Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.613041 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2133c260-e214-428f-8f00-92d29d84594e","Type":"ContainerDied","Data":"ccd515f86f6307530399aa33d74588bf93202a65cc4a1dd8116303f3f0a912f2"} Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.613057 4693 scope.go:117] "RemoveContainer" containerID="7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.613123 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.626067 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2133c260-e214-428f-8f00-92d29d84594e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.626101 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9rzs\" (UniqueName: \"kubernetes.io/projected/2133c260-e214-428f-8f00-92d29d84594e-kube-api-access-c9rzs\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.626115 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2133c260-e214-428f-8f00-92d29d84594e-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.649875 4693 scope.go:117] "RemoveContainer" containerID="7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb" Oct 08 07:35:54 crc kubenswrapper[4693]: E1008 07:35:54.652295 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb\": container with ID starting with 7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb not found: ID does not exist" containerID="7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.652347 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb"} err="failed to get container status \"7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb\": rpc error: code = NotFound desc = could not find container \"7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb\": container with ID starting with 7b4e11bdecbe9cb03c431fa5494caa069173285462521af71d37d886344fb4eb not found: ID does not exist" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.654068 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.670968 4693 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.680457 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:35:54 crc kubenswrapper[4693]: E1008 07:35:54.681016 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2133c260-e214-428f-8f00-92d29d84594e" containerName="nova-scheduler-scheduler" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.681036 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="2133c260-e214-428f-8f00-92d29d84594e" containerName="nova-scheduler-scheduler" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.681240 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="2133c260-e214-428f-8f00-92d29d84594e" containerName="nova-scheduler-scheduler" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.682033 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.687240 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.687524 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.727529 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9e121a7-a4c1-4813-a55f-c0579fa72459-config-data\") pod \"nova-scheduler-0\" (UID: \"c9e121a7-a4c1-4813-a55f-c0579fa72459\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.727803 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cknxw\" (UniqueName: \"kubernetes.io/projected/c9e121a7-a4c1-4813-a55f-c0579fa72459-kube-api-access-cknxw\") pod \"nova-scheduler-0\" (UID: \"c9e121a7-a4c1-4813-a55f-c0579fa72459\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.727971 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9e121a7-a4c1-4813-a55f-c0579fa72459-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c9e121a7-a4c1-4813-a55f-c0579fa72459\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.829797 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9e121a7-a4c1-4813-a55f-c0579fa72459-config-data\") pod \"nova-scheduler-0\" (UID: \"c9e121a7-a4c1-4813-a55f-c0579fa72459\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.829889 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cknxw\" (UniqueName: \"kubernetes.io/projected/c9e121a7-a4c1-4813-a55f-c0579fa72459-kube-api-access-cknxw\") pod \"nova-scheduler-0\" (UID: \"c9e121a7-a4c1-4813-a55f-c0579fa72459\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.829936 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9e121a7-a4c1-4813-a55f-c0579fa72459-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: 
\"c9e121a7-a4c1-4813-a55f-c0579fa72459\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.833759 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9e121a7-a4c1-4813-a55f-c0579fa72459-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c9e121a7-a4c1-4813-a55f-c0579fa72459\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.836708 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9e121a7-a4c1-4813-a55f-c0579fa72459-config-data\") pod \"nova-scheduler-0\" (UID: \"c9e121a7-a4c1-4813-a55f-c0579fa72459\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:54 crc kubenswrapper[4693]: I1008 07:35:54.855934 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cknxw\" (UniqueName: \"kubernetes.io/projected/c9e121a7-a4c1-4813-a55f-c0579fa72459-kube-api-access-cknxw\") pod \"nova-scheduler-0\" (UID: \"c9e121a7-a4c1-4813-a55f-c0579fa72459\") " pod="openstack/nova-scheduler-0" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.005863 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.383474 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2133c260-e214-428f-8f00-92d29d84594e" path="/var/lib/kubelet/pods/2133c260-e214-428f-8f00-92d29d84594e/volumes" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.587637 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.636292 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c9e121a7-a4c1-4813-a55f-c0579fa72459","Type":"ContainerStarted","Data":"40f092bbb099c96163ed72df0af1d29748c37b7811a41bdccfe2deda9b23b9c6"} Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.638133 4693 generic.go:334] "Generic (PLEG): container finished" podID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" containerID="9c9df325479f33b98f1bd9ef16397c253b77e71fc52d512daa665b9ae419a7c9" exitCode=0 Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.638166 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0b3434ec-c2c8-47cf-a2e4-dc61725564d7","Type":"ContainerDied","Data":"9c9df325479f33b98f1bd9ef16397c253b77e71fc52d512daa665b9ae419a7c9"} Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.740903 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.854272 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-config-data\") pod \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.854916 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-combined-ca-bundle\") pod \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.855340 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-logs\") pod \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.855454 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qt7h\" (UniqueName: \"kubernetes.io/projected/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-kube-api-access-6qt7h\") pod \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\" (UID: \"0b3434ec-c2c8-47cf-a2e4-dc61725564d7\") " Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.855872 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-logs" (OuterVolumeSpecName: "logs") pod "0b3434ec-c2c8-47cf-a2e4-dc61725564d7" (UID: "0b3434ec-c2c8-47cf-a2e4-dc61725564d7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.856215 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.859514 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-kube-api-access-6qt7h" (OuterVolumeSpecName: "kube-api-access-6qt7h") pod "0b3434ec-c2c8-47cf-a2e4-dc61725564d7" (UID: "0b3434ec-c2c8-47cf-a2e4-dc61725564d7"). InnerVolumeSpecName "kube-api-access-6qt7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.886672 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b3434ec-c2c8-47cf-a2e4-dc61725564d7" (UID: "0b3434ec-c2c8-47cf-a2e4-dc61725564d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.888909 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-config-data" (OuterVolumeSpecName: "config-data") pod "0b3434ec-c2c8-47cf-a2e4-dc61725564d7" (UID: "0b3434ec-c2c8-47cf-a2e4-dc61725564d7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.945865 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.946923 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.957468 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qt7h\" (UniqueName: \"kubernetes.io/projected/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-kube-api-access-6qt7h\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.957503 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:55 crc kubenswrapper[4693]: I1008 07:35:55.957517 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b3434ec-c2c8-47cf-a2e4-dc61725564d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.654704 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c9e121a7-a4c1-4813-a55f-c0579fa72459","Type":"ContainerStarted","Data":"8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6"} Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.661484 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.662050 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0b3434ec-c2c8-47cf-a2e4-dc61725564d7","Type":"ContainerDied","Data":"5308ffe4486b2969e831b82f5207a57ac28f4f64085b9ff008d8b95d3ebb2b0d"} Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.662098 4693 scope.go:117] "RemoveContainer" containerID="9c9df325479f33b98f1bd9ef16397c253b77e71fc52d512daa665b9ae419a7c9" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.685185 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.685170166 podStartE2EDuration="2.685170166s" podCreationTimestamp="2025-10-08 07:35:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:35:56.674927409 +0000 UTC m=+1142.045892364" watchObservedRunningTime="2025-10-08 07:35:56.685170166 +0000 UTC m=+1142.056135101" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.693388 4693 scope.go:117] "RemoveContainer" containerID="ed18afc701feb69fe62c60af4cacc371b91e159357d41d3301318be9d2e67554" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.707878 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.716931 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.729203 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 08 07:35:56 crc kubenswrapper[4693]: E1008 07:35:56.729694 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" containerName="nova-api-log" Oct 08 07:35:56 crc 
kubenswrapper[4693]: I1008 07:35:56.729720 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" containerName="nova-api-log" Oct 08 07:35:56 crc kubenswrapper[4693]: E1008 07:35:56.729769 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" containerName="nova-api-api" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.729778 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" containerName="nova-api-api" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.730002 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" containerName="nova-api-log" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.730031 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" containerName="nova-api-api" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.733294 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.735585 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.748954 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.878470 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3037b33-df0b-41b2-8a21-82b16d98ceea-logs\") pod \"nova-api-0\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " pod="openstack/nova-api-0" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.878579 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3037b33-df0b-41b2-8a21-82b16d98ceea-config-data\") pod \"nova-api-0\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " pod="openstack/nova-api-0" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.878600 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9s52\" (UniqueName: \"kubernetes.io/projected/b3037b33-df0b-41b2-8a21-82b16d98ceea-kube-api-access-z9s52\") pod \"nova-api-0\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " pod="openstack/nova-api-0" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.878976 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3037b33-df0b-41b2-8a21-82b16d98ceea-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " pod="openstack/nova-api-0" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.981251 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3037b33-df0b-41b2-8a21-82b16d98ceea-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " pod="openstack/nova-api-0" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.981400 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3037b33-df0b-41b2-8a21-82b16d98ceea-logs\") pod \"nova-api-0\" (UID: 
\"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " pod="openstack/nova-api-0" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.981591 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3037b33-df0b-41b2-8a21-82b16d98ceea-config-data\") pod \"nova-api-0\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " pod="openstack/nova-api-0" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.981648 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9s52\" (UniqueName: \"kubernetes.io/projected/b3037b33-df0b-41b2-8a21-82b16d98ceea-kube-api-access-z9s52\") pod \"nova-api-0\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " pod="openstack/nova-api-0" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.982152 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3037b33-df0b-41b2-8a21-82b16d98ceea-logs\") pod \"nova-api-0\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " pod="openstack/nova-api-0" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.986695 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3037b33-df0b-41b2-8a21-82b16d98ceea-config-data\") pod \"nova-api-0\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " pod="openstack/nova-api-0" Oct 08 07:35:56 crc kubenswrapper[4693]: I1008 07:35:56.994175 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3037b33-df0b-41b2-8a21-82b16d98ceea-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " pod="openstack/nova-api-0" Oct 08 07:35:57 crc kubenswrapper[4693]: I1008 07:35:57.021746 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9s52\" (UniqueName: \"kubernetes.io/projected/b3037b33-df0b-41b2-8a21-82b16d98ceea-kube-api-access-z9s52\") pod \"nova-api-0\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " pod="openstack/nova-api-0" Oct 08 07:35:57 crc kubenswrapper[4693]: I1008 07:35:57.057305 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:35:57 crc kubenswrapper[4693]: I1008 07:35:57.372896 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b3434ec-c2c8-47cf-a2e4-dc61725564d7" path="/var/lib/kubelet/pods/0b3434ec-c2c8-47cf-a2e4-dc61725564d7/volumes" Oct 08 07:35:57 crc kubenswrapper[4693]: I1008 07:35:57.531444 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:35:57 crc kubenswrapper[4693]: I1008 07:35:57.676446 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b3037b33-df0b-41b2-8a21-82b16d98ceea","Type":"ContainerStarted","Data":"ef138bc8d4c4a0549ac8c6a2ccfdde64cb456d3d90029550321c8acd828c96a6"} Oct 08 07:35:58 crc kubenswrapper[4693]: I1008 07:35:58.407446 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 08 07:35:58 crc kubenswrapper[4693]: I1008 07:35:58.408448 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="8a58b116-77cf-41d3-87cd-99880a4db87f" containerName="kube-state-metrics" containerID="cri-o://8cd9894db58134c0789884d1ed536614ad507f30ab2bfb0efca2e2de0630124d" gracePeriod=30 Oct 08 07:35:58 crc kubenswrapper[4693]: I1008 07:35:58.701745 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b3037b33-df0b-41b2-8a21-82b16d98ceea","Type":"ContainerStarted","Data":"2187123fe1df6fb7f983953ce88dfbd095410762cee218bbf0a5d2748c7c5972"} Oct 08 07:35:58 crc kubenswrapper[4693]: I1008 07:35:58.702179 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b3037b33-df0b-41b2-8a21-82b16d98ceea","Type":"ContainerStarted","Data":"ccec4e693d93421248fb10c020efdee7de15bd4a2c5ea1dff34b9a18c8dc1edf"} Oct 08 07:35:58 crc kubenswrapper[4693]: I1008 07:35:58.706821 4693 generic.go:334] "Generic (PLEG): container finished" podID="8a58b116-77cf-41d3-87cd-99880a4db87f" containerID="8cd9894db58134c0789884d1ed536614ad507f30ab2bfb0efca2e2de0630124d" exitCode=2 Oct 08 07:35:58 crc kubenswrapper[4693]: I1008 07:35:58.706861 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8a58b116-77cf-41d3-87cd-99880a4db87f","Type":"ContainerDied","Data":"8cd9894db58134c0789884d1ed536614ad507f30ab2bfb0efca2e2de0630124d"} Oct 08 07:35:58 crc kubenswrapper[4693]: I1008 07:35:58.725489 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.7254335899999997 podStartE2EDuration="2.72543359s" podCreationTimestamp="2025-10-08 07:35:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:35:58.716007981 +0000 UTC m=+1144.086972926" watchObservedRunningTime="2025-10-08 07:35:58.72543359 +0000 UTC m=+1144.096398535" Oct 08 07:35:58 crc kubenswrapper[4693]: I1008 07:35:58.884867 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.030696 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xggg\" (UniqueName: \"kubernetes.io/projected/8a58b116-77cf-41d3-87cd-99880a4db87f-kube-api-access-8xggg\") pod \"8a58b116-77cf-41d3-87cd-99880a4db87f\" (UID: \"8a58b116-77cf-41d3-87cd-99880a4db87f\") " Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.034609 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a58b116-77cf-41d3-87cd-99880a4db87f-kube-api-access-8xggg" (OuterVolumeSpecName: "kube-api-access-8xggg") pod "8a58b116-77cf-41d3-87cd-99880a4db87f" (UID: "8a58b116-77cf-41d3-87cd-99880a4db87f"). InnerVolumeSpecName "kube-api-access-8xggg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.133646 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xggg\" (UniqueName: \"kubernetes.io/projected/8a58b116-77cf-41d3-87cd-99880a4db87f-kube-api-access-8xggg\") on node \"crc\" DevicePath \"\"" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.717227 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.717242 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8a58b116-77cf-41d3-87cd-99880a4db87f","Type":"ContainerDied","Data":"8f55365903111de135f4d8504db9da6a9482b5a13b7e01fb33783d0d8f12d5d2"} Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.717318 4693 scope.go:117] "RemoveContainer" containerID="8cd9894db58134c0789884d1ed536614ad507f30ab2bfb0efca2e2de0630124d" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.743352 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.752270 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.769586 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 08 07:35:59 crc kubenswrapper[4693]: E1008 07:35:59.770021 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a58b116-77cf-41d3-87cd-99880a4db87f" containerName="kube-state-metrics" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.770038 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a58b116-77cf-41d3-87cd-99880a4db87f" containerName="kube-state-metrics" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.770233 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a58b116-77cf-41d3-87cd-99880a4db87f" containerName="kube-state-metrics" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.770839 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.772976 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.773374 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.778716 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.846483 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/028998c5-3dec-46de-a5bb-bc5855df099e-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"028998c5-3dec-46de-a5bb-bc5855df099e\") " pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.846581 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gm7bt\" (UniqueName: \"kubernetes.io/projected/028998c5-3dec-46de-a5bb-bc5855df099e-kube-api-access-gm7bt\") pod \"kube-state-metrics-0\" (UID: \"028998c5-3dec-46de-a5bb-bc5855df099e\") " pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.847063 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/028998c5-3dec-46de-a5bb-bc5855df099e-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"028998c5-3dec-46de-a5bb-bc5855df099e\") " pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.847133 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/028998c5-3dec-46de-a5bb-bc5855df099e-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"028998c5-3dec-46de-a5bb-bc5855df099e\") " pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.948392 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/028998c5-3dec-46de-a5bb-bc5855df099e-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"028998c5-3dec-46de-a5bb-bc5855df099e\") " pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.948435 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/028998c5-3dec-46de-a5bb-bc5855df099e-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"028998c5-3dec-46de-a5bb-bc5855df099e\") " pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.948492 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/028998c5-3dec-46de-a5bb-bc5855df099e-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"028998c5-3dec-46de-a5bb-bc5855df099e\") " pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.948522 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gm7bt\" 
(UniqueName: \"kubernetes.io/projected/028998c5-3dec-46de-a5bb-bc5855df099e-kube-api-access-gm7bt\") pod \"kube-state-metrics-0\" (UID: \"028998c5-3dec-46de-a5bb-bc5855df099e\") " pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.953649 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/028998c5-3dec-46de-a5bb-bc5855df099e-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"028998c5-3dec-46de-a5bb-bc5855df099e\") " pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.956133 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.958433 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/028998c5-3dec-46de-a5bb-bc5855df099e-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"028998c5-3dec-46de-a5bb-bc5855df099e\") " pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.961775 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/028998c5-3dec-46de-a5bb-bc5855df099e-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"028998c5-3dec-46de-a5bb-bc5855df099e\") " pod="openstack/kube-state-metrics-0" Oct 08 07:35:59 crc kubenswrapper[4693]: I1008 07:35:59.994426 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gm7bt\" (UniqueName: \"kubernetes.io/projected/028998c5-3dec-46de-a5bb-bc5855df099e-kube-api-access-gm7bt\") pod \"kube-state-metrics-0\" (UID: \"028998c5-3dec-46de-a5bb-bc5855df099e\") " pod="openstack/kube-state-metrics-0" Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.006526 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.033139 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.033542 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="ceilometer-central-agent" containerID="cri-o://d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba" gracePeriod=30 Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.033702 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="proxy-httpd" containerID="cri-o://609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4" gracePeriod=30 Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.033782 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="sg-core" containerID="cri-o://fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95" gracePeriod=30 Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.033874 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="ceilometer-notification-agent" 
containerID="cri-o://c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba" gracePeriod=30 Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.100089 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.561795 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.734538 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"028998c5-3dec-46de-a5bb-bc5855df099e","Type":"ContainerStarted","Data":"2da8e418b9bf4829cc64b1f1cf0cf5507605926519a213ba2ed4ff6af2015adc"} Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.740052 4693 generic.go:334] "Generic (PLEG): container finished" podID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerID="609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4" exitCode=0 Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.740188 4693 generic.go:334] "Generic (PLEG): container finished" podID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerID="fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95" exitCode=2 Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.740219 4693 generic.go:334] "Generic (PLEG): container finished" podID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerID="d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba" exitCode=0 Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.740123 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71ae5ffc-8a6c-496c-83b3-3a394b90f549","Type":"ContainerDied","Data":"609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4"} Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.740277 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71ae5ffc-8a6c-496c-83b3-3a394b90f549","Type":"ContainerDied","Data":"fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95"} Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.740306 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71ae5ffc-8a6c-496c-83b3-3a394b90f549","Type":"ContainerDied","Data":"d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba"} Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.945453 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 08 07:36:00 crc kubenswrapper[4693]: I1008 07:36:00.945779 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 08 07:36:01 crc kubenswrapper[4693]: I1008 07:36:01.373570 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a58b116-77cf-41d3-87cd-99880a4db87f" path="/var/lib/kubelet/pods/8a58b116-77cf-41d3-87cd-99880a4db87f/volumes" Oct 08 07:36:01 crc kubenswrapper[4693]: I1008 07:36:01.751745 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"028998c5-3dec-46de-a5bb-bc5855df099e","Type":"ContainerStarted","Data":"8f1593220813e358346cdfc864f5c91f75a0e79b726617baa40d30d5ae1b0383"} Oct 08 07:36:01 crc kubenswrapper[4693]: I1008 07:36:01.752156 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 08 07:36:01 crc kubenswrapper[4693]: I1008 07:36:01.779321 4693 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.348143048 podStartE2EDuration="2.779295869s" podCreationTimestamp="2025-10-08 07:35:59 +0000 UTC" firstStartedPulling="2025-10-08 07:36:00.572275956 +0000 UTC m=+1145.943240891" lastFinishedPulling="2025-10-08 07:36:01.003428747 +0000 UTC m=+1146.374393712" observedRunningTime="2025-10-08 07:36:01.766302178 +0000 UTC m=+1147.137267133" watchObservedRunningTime="2025-10-08 07:36:01.779295869 +0000 UTC m=+1147.150260824" Oct 08 07:36:01 crc kubenswrapper[4693]: I1008 07:36:01.959011 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 08 07:36:01 crc kubenswrapper[4693]: I1008 07:36:01.959179 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.134926 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.207722 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-combined-ca-bundle\") pod \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.207862 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71ae5ffc-8a6c-496c-83b3-3a394b90f549-run-httpd\") pod \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.207927 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-scripts\") pod \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.207950 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wfdg\" (UniqueName: \"kubernetes.io/projected/71ae5ffc-8a6c-496c-83b3-3a394b90f549-kube-api-access-8wfdg\") pod \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.208003 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-sg-core-conf-yaml\") pod \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.208043 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-config-data\") pod \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\" (UID: 
\"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.208089 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71ae5ffc-8a6c-496c-83b3-3a394b90f549-log-httpd\") pod \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\" (UID: \"71ae5ffc-8a6c-496c-83b3-3a394b90f549\") " Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.208658 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71ae5ffc-8a6c-496c-83b3-3a394b90f549-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "71ae5ffc-8a6c-496c-83b3-3a394b90f549" (UID: "71ae5ffc-8a6c-496c-83b3-3a394b90f549"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.208903 4693 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71ae5ffc-8a6c-496c-83b3-3a394b90f549-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.208997 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71ae5ffc-8a6c-496c-83b3-3a394b90f549-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "71ae5ffc-8a6c-496c-83b3-3a394b90f549" (UID: "71ae5ffc-8a6c-496c-83b3-3a394b90f549"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.215643 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71ae5ffc-8a6c-496c-83b3-3a394b90f549-kube-api-access-8wfdg" (OuterVolumeSpecName: "kube-api-access-8wfdg") pod "71ae5ffc-8a6c-496c-83b3-3a394b90f549" (UID: "71ae5ffc-8a6c-496c-83b3-3a394b90f549"). InnerVolumeSpecName "kube-api-access-8wfdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.223104 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-scripts" (OuterVolumeSpecName: "scripts") pod "71ae5ffc-8a6c-496c-83b3-3a394b90f549" (UID: "71ae5ffc-8a6c-496c-83b3-3a394b90f549"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.252014 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "71ae5ffc-8a6c-496c-83b3-3a394b90f549" (UID: "71ae5ffc-8a6c-496c-83b3-3a394b90f549"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.310701 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.310739 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wfdg\" (UniqueName: \"kubernetes.io/projected/71ae5ffc-8a6c-496c-83b3-3a394b90f549-kube-api-access-8wfdg\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.310749 4693 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.310757 4693 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71ae5ffc-8a6c-496c-83b3-3a394b90f549-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.316455 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "71ae5ffc-8a6c-496c-83b3-3a394b90f549" (UID: "71ae5ffc-8a6c-496c-83b3-3a394b90f549"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.325979 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-config-data" (OuterVolumeSpecName: "config-data") pod "71ae5ffc-8a6c-496c-83b3-3a394b90f549" (UID: "71ae5ffc-8a6c-496c-83b3-3a394b90f549"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.412239 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.412264 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71ae5ffc-8a6c-496c-83b3-3a394b90f549-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.770621 4693 generic.go:334] "Generic (PLEG): container finished" podID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerID="c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba" exitCode=0 Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.771809 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.773919 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71ae5ffc-8a6c-496c-83b3-3a394b90f549","Type":"ContainerDied","Data":"c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba"} Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.773990 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"71ae5ffc-8a6c-496c-83b3-3a394b90f549","Type":"ContainerDied","Data":"d2b744f6a3941b8d78f38c58e786099c0740f85ccbc7e34e7f3657000321efd9"} Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.774021 4693 scope.go:117] "RemoveContainer" containerID="609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.801949 4693 scope.go:117] "RemoveContainer" containerID="fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.827107 4693 scope.go:117] "RemoveContainer" containerID="c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.830759 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.861767 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.864487 4693 scope.go:117] "RemoveContainer" containerID="d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.869578 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:36:02 crc kubenswrapper[4693]: E1008 07:36:02.870066 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="ceilometer-central-agent" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.870085 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="ceilometer-central-agent" Oct 08 07:36:02 crc kubenswrapper[4693]: E1008 07:36:02.870113 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="sg-core" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.870121 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="sg-core" Oct 08 07:36:02 crc kubenswrapper[4693]: E1008 07:36:02.870147 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="ceilometer-notification-agent" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.870157 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="ceilometer-notification-agent" Oct 08 07:36:02 crc kubenswrapper[4693]: E1008 07:36:02.870171 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="proxy-httpd" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.870181 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="proxy-httpd" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.870392 4693 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="proxy-httpd" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.870417 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="ceilometer-notification-agent" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.870441 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="ceilometer-central-agent" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.870464 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" containerName="sg-core" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.872603 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.874688 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.874806 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.875067 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.877142 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.911214 4693 scope.go:117] "RemoveContainer" containerID="609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4" Oct 08 07:36:02 crc kubenswrapper[4693]: E1008 07:36:02.911691 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4\": container with ID starting with 609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4 not found: ID does not exist" containerID="609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.911726 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4"} err="failed to get container status \"609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4\": rpc error: code = NotFound desc = could not find container \"609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4\": container with ID starting with 609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4 not found: ID does not exist" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.911750 4693 scope.go:117] "RemoveContainer" containerID="fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95" Oct 08 07:36:02 crc kubenswrapper[4693]: E1008 07:36:02.912025 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95\": container with ID starting with fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95 not found: ID does not exist" containerID="fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.912117 4693 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95"} err="failed to get container status \"fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95\": rpc error: code = NotFound desc = could not find container \"fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95\": container with ID starting with fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95 not found: ID does not exist" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.912186 4693 scope.go:117] "RemoveContainer" containerID="c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba" Oct 08 07:36:02 crc kubenswrapper[4693]: E1008 07:36:02.912464 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba\": container with ID starting with c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba not found: ID does not exist" containerID="c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.912546 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba"} err="failed to get container status \"c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba\": rpc error: code = NotFound desc = could not find container \"c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba\": container with ID starting with c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba not found: ID does not exist" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.912658 4693 scope.go:117] "RemoveContainer" containerID="d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba" Oct 08 07:36:02 crc kubenswrapper[4693]: E1008 07:36:02.913029 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba\": container with ID starting with d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba not found: ID does not exist" containerID="d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba" Oct 08 07:36:02 crc kubenswrapper[4693]: I1008 07:36:02.913110 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba"} err="failed to get container status \"d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba\": rpc error: code = NotFound desc = could not find container \"d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba\": container with ID starting with d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba not found: ID does not exist" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.022014 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-log-httpd\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.022338 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-run-httpd\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.022464 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.022583 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.022711 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-config-data\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.022858 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-scripts\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.022979 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltfrs\" (UniqueName: \"kubernetes.io/projected/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-kube-api-access-ltfrs\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.023106 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.124969 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.125251 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-log-httpd\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.125280 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-run-httpd\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.125321 
4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.125349 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.125390 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-config-data\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.125411 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-scripts\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.125433 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltfrs\" (UniqueName: \"kubernetes.io/projected/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-kube-api-access-ltfrs\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.126099 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-log-httpd\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.129317 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.129523 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-config-data\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.129653 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-run-httpd\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.130855 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.132680 4693 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.134138 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-scripts\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.152495 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltfrs\" (UniqueName: \"kubernetes.io/projected/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-kube-api-access-ltfrs\") pod \"ceilometer-0\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") " pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.213246 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.375964 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71ae5ffc-8a6c-496c-83b3-3a394b90f549" path="/var/lib/kubelet/pods/71ae5ffc-8a6c-496c-83b3-3a394b90f549/volumes" Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.666283 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:36:03 crc kubenswrapper[4693]: I1008 07:36:03.781871 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef71f046-a392-4737-a4f8-e7a5ffdf66a6","Type":"ContainerStarted","Data":"e49957ccc08afdbfbc75d54b3bbbaaebbc7cda4e50cabb80cbc6a4665e156259"} Oct 08 07:36:04 crc kubenswrapper[4693]: I1008 07:36:04.800320 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef71f046-a392-4737-a4f8-e7a5ffdf66a6","Type":"ContainerStarted","Data":"f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07"} Oct 08 07:36:05 crc kubenswrapper[4693]: I1008 07:36:05.006469 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 08 07:36:05 crc kubenswrapper[4693]: I1008 07:36:05.035082 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 08 07:36:05 crc kubenswrapper[4693]: I1008 07:36:05.825993 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef71f046-a392-4737-a4f8-e7a5ffdf66a6","Type":"ContainerStarted","Data":"b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2"} Oct 08 07:36:05 crc kubenswrapper[4693]: I1008 07:36:05.882715 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 08 07:36:06 crc kubenswrapper[4693]: I1008 07:36:06.841171 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef71f046-a392-4737-a4f8-e7a5ffdf66a6","Type":"ContainerStarted","Data":"705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8"} Oct 08 07:36:07 crc kubenswrapper[4693]: I1008 07:36:07.059397 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 08 07:36:07 crc kubenswrapper[4693]: I1008 07:36:07.059753 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-api-0" Oct 08 07:36:07 crc kubenswrapper[4693]: I1008 07:36:07.857629 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef71f046-a392-4737-a4f8-e7a5ffdf66a6","Type":"ContainerStarted","Data":"e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868"} Oct 08 07:36:07 crc kubenswrapper[4693]: I1008 07:36:07.858225 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 08 07:36:07 crc kubenswrapper[4693]: I1008 07:36:07.891760 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.518496914 podStartE2EDuration="5.891733877s" podCreationTimestamp="2025-10-08 07:36:02 +0000 UTC" firstStartedPulling="2025-10-08 07:36:03.671496925 +0000 UTC m=+1149.042461860" lastFinishedPulling="2025-10-08 07:36:07.044733858 +0000 UTC m=+1152.415698823" observedRunningTime="2025-10-08 07:36:07.880326633 +0000 UTC m=+1153.251291598" watchObservedRunningTime="2025-10-08 07:36:07.891733877 +0000 UTC m=+1153.262698832" Oct 08 07:36:08 crc kubenswrapper[4693]: I1008 07:36:08.141018 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b3037b33-df0b-41b2-8a21-82b16d98ceea" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 08 07:36:08 crc kubenswrapper[4693]: I1008 07:36:08.141131 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b3037b33-df0b-41b2-8a21-82b16d98ceea" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 08 07:36:10 crc kubenswrapper[4693]: I1008 07:36:10.110765 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 08 07:36:10 crc kubenswrapper[4693]: I1008 07:36:10.955269 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 08 07:36:10 crc kubenswrapper[4693]: I1008 07:36:10.960564 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 08 07:36:10 crc kubenswrapper[4693]: I1008 07:36:10.971908 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 08 07:36:11 crc kubenswrapper[4693]: I1008 07:36:11.912354 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 08 07:36:13 crc kubenswrapper[4693]: E1008 07:36:13.758398 4693 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71ae5ffc_8a6c_496c_83b3_3a394b90f549.slice/crio-fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71ae5ffc_8a6c_496c_83b3_3a394b90f549.slice/crio-d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a58b116_77cf_41d3_87cd_99880a4db87f.slice/crio-8f55365903111de135f4d8504db9da6a9482b5a13b7e01fb33783d0d8f12d5d2\": RecentStats: unable to find data in memory 
cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71ae5ffc_8a6c_496c_83b3_3a394b90f549.slice/crio-conmon-c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71ae5ffc_8a6c_496c_83b3_3a394b90f549.slice/crio-conmon-fd0050d8e89905ba294159da36d938aae37716f8f657594defb5a128dacf0e95.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71ae5ffc_8a6c_496c_83b3_3a394b90f549.slice/crio-conmon-d5fcd1875845bc57a947cdc7810085296d90149dfb25d355767afeb8b36f93ba.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71ae5ffc_8a6c_496c_83b3_3a394b90f549.slice/crio-c08ef2fccb2e347f9047947084efb535d2e4e42b585f18f5b65f6a3c5d7741ba.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a58b116_77cf_41d3_87cd_99880a4db87f.slice/crio-8cd9894db58134c0789884d1ed536614ad507f30ab2bfb0efca2e2de0630124d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a58b116_77cf_41d3_87cd_99880a4db87f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71ae5ffc_8a6c_496c_83b3_3a394b90f549.slice/crio-conmon-609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71ae5ffc_8a6c_496c_83b3_3a394b90f549.slice/crio-d2b744f6a3941b8d78f38c58e786099c0740f85ccbc7e34e7f3657000321efd9\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71ae5ffc_8a6c_496c_83b3_3a394b90f549.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod936e36b3_6be5_4b41_9473_08474ea5443c.slice/crio-c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a58b116_77cf_41d3_87cd_99880a4db87f.slice/crio-conmon-8cd9894db58134c0789884d1ed536614ad507f30ab2bfb0efca2e2de0630124d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71ae5ffc_8a6c_496c_83b3_3a394b90f549.slice/crio-609b5f50047627d1bad2d12f73ca998aaf0a10d1573f6f91135f548d660f9bf4.scope\": RecentStats: unable to find data in memory cache]" Oct 08 07:36:13 crc kubenswrapper[4693]: I1008 07:36:13.839894 4693 util.go:48] "No ready sandbox for pod can be found. 
Oct 08 07:36:13 crc kubenswrapper[4693]: I1008 07:36:13.927504 4693 generic.go:334] "Generic (PLEG): container finished" podID="936e36b3-6be5-4b41-9473-08474ea5443c" containerID="c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0" exitCode=137
Oct 08 07:36:13 crc kubenswrapper[4693]: I1008 07:36:13.928098 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"936e36b3-6be5-4b41-9473-08474ea5443c","Type":"ContainerDied","Data":"c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0"}
Oct 08 07:36:13 crc kubenswrapper[4693]: I1008 07:36:13.928129 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:13 crc kubenswrapper[4693]: I1008 07:36:13.928158 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"936e36b3-6be5-4b41-9473-08474ea5443c","Type":"ContainerDied","Data":"662a97075996d687c3f3e9cf57adbddbcbef4829514d99b2217715832b1537ce"}
Oct 08 07:36:13 crc kubenswrapper[4693]: I1008 07:36:13.928178 4693 scope.go:117] "RemoveContainer" containerID="c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0"
Oct 08 07:36:13 crc kubenswrapper[4693]: I1008 07:36:13.951166 4693 scope.go:117] "RemoveContainer" containerID="c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0"
Oct 08 07:36:13 crc kubenswrapper[4693]: E1008 07:36:13.951641 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0\": container with ID starting with c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0 not found: ID does not exist" containerID="c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0"
Oct 08 07:36:13 crc kubenswrapper[4693]: I1008 07:36:13.951693 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0"} err="failed to get container status \"c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0\": rpc error: code = NotFound desc = could not find container \"c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0\": container with ID starting with c9e93435cd9329fe1dc590f49028d492162446a1979f70e767d0cc0921b714a0 not found: ID does not exist"
Oct 08 07:36:13 crc kubenswrapper[4693]: I1008 07:36:13.961793 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc6lv\" (UniqueName: \"kubernetes.io/projected/936e36b3-6be5-4b41-9473-08474ea5443c-kube-api-access-vc6lv\") pod \"936e36b3-6be5-4b41-9473-08474ea5443c\" (UID: \"936e36b3-6be5-4b41-9473-08474ea5443c\") "
Oct 08 07:36:13 crc kubenswrapper[4693]: I1008 07:36:13.961859 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/936e36b3-6be5-4b41-9473-08474ea5443c-combined-ca-bundle\") pod \"936e36b3-6be5-4b41-9473-08474ea5443c\" (UID: \"936e36b3-6be5-4b41-9473-08474ea5443c\") "
Oct 08 07:36:13 crc kubenswrapper[4693]: I1008 07:36:13.961912 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/936e36b3-6be5-4b41-9473-08474ea5443c-config-data\") pod \"936e36b3-6be5-4b41-9473-08474ea5443c\" (UID: \"936e36b3-6be5-4b41-9473-08474ea5443c\") "
Oct 08 07:36:13 crc kubenswrapper[4693]: I1008 07:36:13.980414 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/936e36b3-6be5-4b41-9473-08474ea5443c-kube-api-access-vc6lv" (OuterVolumeSpecName: "kube-api-access-vc6lv") pod "936e36b3-6be5-4b41-9473-08474ea5443c" (UID: "936e36b3-6be5-4b41-9473-08474ea5443c"). InnerVolumeSpecName "kube-api-access-vc6lv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.000283 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/936e36b3-6be5-4b41-9473-08474ea5443c-config-data" (OuterVolumeSpecName: "config-data") pod "936e36b3-6be5-4b41-9473-08474ea5443c" (UID: "936e36b3-6be5-4b41-9473-08474ea5443c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.003083 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/936e36b3-6be5-4b41-9473-08474ea5443c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "936e36b3-6be5-4b41-9473-08474ea5443c" (UID: "936e36b3-6be5-4b41-9473-08474ea5443c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.064260 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc6lv\" (UniqueName: \"kubernetes.io/projected/936e36b3-6be5-4b41-9473-08474ea5443c-kube-api-access-vc6lv\") on node \"crc\" DevicePath \"\""
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.064289 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/936e36b3-6be5-4b41-9473-08474ea5443c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.064299 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/936e36b3-6be5-4b41-9473-08474ea5443c-config-data\") on node \"crc\" DevicePath \"\""
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.266685 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.277781 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.295721 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 08 07:36:14 crc kubenswrapper[4693]: E1008 07:36:14.296224 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="936e36b3-6be5-4b41-9473-08474ea5443c" containerName="nova-cell1-novncproxy-novncproxy"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.296248 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="936e36b3-6be5-4b41-9473-08474ea5443c" containerName="nova-cell1-novncproxy-novncproxy"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.298725 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="936e36b3-6be5-4b41-9473-08474ea5443c" containerName="nova-cell1-novncproxy-novncproxy"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.299357 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.301306 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.301680 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.301759 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.311578 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.371958 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55d9s\" (UniqueName: \"kubernetes.io/projected/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-kube-api-access-55d9s\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.372022 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.372079 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.372105 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.372187 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.474554 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.474967 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.475345 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.475568 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55d9s\" (UniqueName: \"kubernetes.io/projected/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-kube-api-access-55d9s\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.475852 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.480465 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.481220 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.481402 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.482073 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.494797 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55d9s\" (UniqueName: \"kubernetes.io/projected/532a7d37-716c-43b9-b417-8f9ab3ed3dcf-kube-api-access-55d9s\") pod \"nova-cell1-novncproxy-0\" (UID: \"532a7d37-716c-43b9-b417-8f9ab3ed3dcf\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.668208 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:14 crc kubenswrapper[4693]: I1008 07:36:14.988557 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 08 07:36:14 crc kubenswrapper[4693]: W1008 07:36:14.996997 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod532a7d37_716c_43b9_b417_8f9ab3ed3dcf.slice/crio-9691797fa3c2452d25f9732e70d4907276f17adf59c02d60d68524fddc394539 WatchSource:0}: Error finding container 9691797fa3c2452d25f9732e70d4907276f17adf59c02d60d68524fddc394539: Status 404 returned error can't find the container with id 9691797fa3c2452d25f9732e70d4907276f17adf59c02d60d68524fddc394539
Oct 08 07:36:15 crc kubenswrapper[4693]: I1008 07:36:15.381637 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="936e36b3-6be5-4b41-9473-08474ea5443c" path="/var/lib/kubelet/pods/936e36b3-6be5-4b41-9473-08474ea5443c/volumes"
Oct 08 07:36:15 crc kubenswrapper[4693]: I1008 07:36:15.955274 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"532a7d37-716c-43b9-b417-8f9ab3ed3dcf","Type":"ContainerStarted","Data":"16efef053665a3c9f50f9ccd5d8e69e814912699ba139053a362006e1c1416e9"}
Oct 08 07:36:15 crc kubenswrapper[4693]: I1008 07:36:15.955683 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"532a7d37-716c-43b9-b417-8f9ab3ed3dcf","Type":"ContainerStarted","Data":"9691797fa3c2452d25f9732e70d4907276f17adf59c02d60d68524fddc394539"}
Oct 08 07:36:15 crc kubenswrapper[4693]: I1008 07:36:15.988770 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.988745939 podStartE2EDuration="1.988745939s" podCreationTimestamp="2025-10-08 07:36:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:36:15.985511614 +0000 UTC m=+1161.356476549" watchObservedRunningTime="2025-10-08 07:36:15.988745939 +0000 UTC m=+1161.359710894"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.062973 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.063033 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.063407 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.065195 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.068635 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.070645 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.288711 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-r9btl"]
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.291600 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.329097 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-r9btl"]
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.443799 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.444162 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttcxc\" (UniqueName: \"kubernetes.io/projected/d423401e-dd93-4c22-a0f4-1af916d772a5-kube-api-access-ttcxc\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.444342 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-config\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.445640 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.445871 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.446063 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.548439 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.549823 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttcxc\" (UniqueName: \"kubernetes.io/projected/d423401e-dd93-4c22-a0f4-1af916d772a5-kube-api-access-ttcxc\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.550290 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-config\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.551014 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.551676 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.551784 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.551592 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.550974 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-config\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.552774 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.549758 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.553831 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.579736 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttcxc\" (UniqueName: \"kubernetes.io/projected/d423401e-dd93-4c22-a0f4-1af916d772a5-kube-api-access-ttcxc\") pod \"dnsmasq-dns-89c5cd4d5-r9btl\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:17 crc kubenswrapper[4693]: I1008 07:36:17.612972 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:18 crc kubenswrapper[4693]: I1008 07:36:18.083615 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-r9btl"]
Oct 08 07:36:18 crc kubenswrapper[4693]: W1008 07:36:18.090007 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd423401e_dd93_4c22_a0f4_1af916d772a5.slice/crio-6228df7d5147e9425aa617bdb3c1c05758ed17075159123971246436617665e4 WatchSource:0}: Error finding container 6228df7d5147e9425aa617bdb3c1c05758ed17075159123971246436617665e4: Status 404 returned error can't find the container with id 6228df7d5147e9425aa617bdb3c1c05758ed17075159123971246436617665e4
Oct 08 07:36:18 crc kubenswrapper[4693]: I1008 07:36:18.992267 4693 generic.go:334] "Generic (PLEG): container finished" podID="d423401e-dd93-4c22-a0f4-1af916d772a5" containerID="5e4ace75a34a01b5fad416889ab8a034d411c6db784164b3c165b50d8ad84a5e" exitCode=0
Oct 08 07:36:18 crc kubenswrapper[4693]: I1008 07:36:18.992355 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl" event={"ID":"d423401e-dd93-4c22-a0f4-1af916d772a5","Type":"ContainerDied","Data":"5e4ace75a34a01b5fad416889ab8a034d411c6db784164b3c165b50d8ad84a5e"}
Oct 08 07:36:18 crc kubenswrapper[4693]: I1008 07:36:18.992879 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl" event={"ID":"d423401e-dd93-4c22-a0f4-1af916d772a5","Type":"ContainerStarted","Data":"6228df7d5147e9425aa617bdb3c1c05758ed17075159123971246436617665e4"}
Oct 08 07:36:19 crc kubenswrapper[4693]: I1008 07:36:19.202123 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 08 07:36:19 crc kubenswrapper[4693]: I1008 07:36:19.202498 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="ceilometer-central-agent" containerID="cri-o://f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07" gracePeriod=30
Oct 08 07:36:19 crc kubenswrapper[4693]: I1008 07:36:19.202631 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="proxy-httpd" containerID="cri-o://e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868" gracePeriod=30
Oct 08 07:36:19 crc kubenswrapper[4693]: I1008 07:36:19.202669 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="sg-core" containerID="cri-o://705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8" gracePeriod=30
Oct 08 07:36:19 crc kubenswrapper[4693]: I1008 07:36:19.202699 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="ceilometer-notification-agent" containerID="cri-o://b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2" gracePeriod=30
Oct 08 07:36:19 crc kubenswrapper[4693]: I1008 07:36:19.211598 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.196:3000/\": read tcp 10.217.0.2:40308->10.217.0.196:3000: read: connection reset by peer"
Oct 08 07:36:19 crc kubenswrapper[4693]: I1008 07:36:19.315318 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 08 07:36:19 crc kubenswrapper[4693]: I1008 07:36:19.669019 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Oct 08 07:36:20 crc kubenswrapper[4693]: I1008 07:36:20.004160 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl" event={"ID":"d423401e-dd93-4c22-a0f4-1af916d772a5","Type":"ContainerStarted","Data":"d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f"}
Oct 08 07:36:20 crc kubenswrapper[4693]: I1008 07:36:20.005092 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl"
Oct 08 07:36:20 crc kubenswrapper[4693]: I1008 07:36:20.008416 4693 generic.go:334] "Generic (PLEG): container finished" podID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerID="e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868" exitCode=0
Oct 08 07:36:20 crc kubenswrapper[4693]: I1008 07:36:20.008450 4693 generic.go:334] "Generic (PLEG): container finished" podID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerID="705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8" exitCode=2
Oct 08 07:36:20 crc kubenswrapper[4693]: I1008 07:36:20.008457 4693 generic.go:334] "Generic (PLEG): container finished" podID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerID="f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07" exitCode=0
Oct 08 07:36:20 crc kubenswrapper[4693]: I1008 07:36:20.008537 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef71f046-a392-4737-a4f8-e7a5ffdf66a6","Type":"ContainerDied","Data":"e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868"}
Oct 08 07:36:20 crc kubenswrapper[4693]: I1008 07:36:20.008572 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef71f046-a392-4737-a4f8-e7a5ffdf66a6","Type":"ContainerDied","Data":"705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8"}
Oct 08 07:36:20 crc kubenswrapper[4693]: I1008 07:36:20.008586 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef71f046-a392-4737-a4f8-e7a5ffdf66a6","Type":"ContainerDied","Data":"f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07"}
Oct 08 07:36:20 crc kubenswrapper[4693]: I1008 07:36:20.008631 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b3037b33-df0b-41b2-8a21-82b16d98ceea" containerName="nova-api-log" containerID="cri-o://ccec4e693d93421248fb10c020efdee7de15bd4a2c5ea1dff34b9a18c8dc1edf" gracePeriod=30
Oct 08 07:36:20 crc kubenswrapper[4693]: I1008 07:36:20.008685 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b3037b33-df0b-41b2-8a21-82b16d98ceea" containerName="nova-api-api" containerID="cri-o://2187123fe1df6fb7f983953ce88dfbd095410762cee218bbf0a5d2748c7c5972" gracePeriod=30
Oct 08 07:36:20 crc kubenswrapper[4693]: I1008 07:36:20.038185 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl" podStartSLOduration=3.038154356 podStartE2EDuration="3.038154356s" podCreationTimestamp="2025-10-08 07:36:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:36:20.029526957 +0000 UTC m=+1165.400491922" watchObservedRunningTime="2025-10-08 07:36:20.038154356 +0000 UTC m=+1165.409119331"
Oct 08 07:36:20 crc kubenswrapper[4693]: I1008 07:36:20.969364 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.017971 4693 generic.go:334] "Generic (PLEG): container finished" podID="b3037b33-df0b-41b2-8a21-82b16d98ceea" containerID="ccec4e693d93421248fb10c020efdee7de15bd4a2c5ea1dff34b9a18c8dc1edf" exitCode=143
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.018026 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b3037b33-df0b-41b2-8a21-82b16d98ceea","Type":"ContainerDied","Data":"ccec4e693d93421248fb10c020efdee7de15bd4a2c5ea1dff34b9a18c8dc1edf"}
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.020193 4693 generic.go:334] "Generic (PLEG): container finished" podID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerID="b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2" exitCode=0
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.021080 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.021531 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef71f046-a392-4737-a4f8-e7a5ffdf66a6","Type":"ContainerDied","Data":"b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2"}
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.021553 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef71f046-a392-4737-a4f8-e7a5ffdf66a6","Type":"ContainerDied","Data":"e49957ccc08afdbfbc75d54b3bbbaaebbc7cda4e50cabb80cbc6a4665e156259"}
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.021568 4693 scope.go:117] "RemoveContainer" containerID="e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.045176 4693 scope.go:117] "RemoveContainer" containerID="705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.065002 4693 scope.go:117] "RemoveContainer" containerID="b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.091024 4693 scope.go:117] "RemoveContainer" containerID="f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.112185 4693 scope.go:117] "RemoveContainer" containerID="e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868"
Oct 08 07:36:21 crc kubenswrapper[4693]: E1008 07:36:21.112605 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868\": container with ID starting with e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868 not found: ID does not exist" containerID="e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.112637 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868"} err="failed to get container status \"e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868\": rpc error: code = NotFound desc = could not find container \"e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868\": container with ID starting with e6db77cf85dec276b93411d944a2664d546bd2a0219e653b592854ce89c71868 not found: ID does not exist"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.112657 4693 scope.go:117] "RemoveContainer" containerID="705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8"
Oct 08 07:36:21 crc kubenswrapper[4693]: E1008 07:36:21.113035 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8\": container with ID starting with 705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8 not found: ID does not exist" containerID="705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.113055 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8"} err="failed to get container status \"705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8\": rpc error: code = NotFound desc = could not find container \"705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8\": container with ID starting with 705ee858fbc5cb39f5bb791663c6f4473de9546a0cdd86ae746e5960b679afe8 not found: ID does not exist"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.113068 4693 scope.go:117] "RemoveContainer" containerID="b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2"
Oct 08 07:36:21 crc kubenswrapper[4693]: E1008 07:36:21.113424 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2\": container with ID starting with b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2 not found: ID does not exist" containerID="b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.113448 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2"} err="failed to get container status \"b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2\": rpc error: code = NotFound desc = could not find container \"b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2\": container with ID starting with b37720e3b1fd5c07702a322df68b159fddccf43680072dd9da1bb3bcdd9c84f2 not found: ID does not exist"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.113461 4693 scope.go:117] "RemoveContainer" containerID="f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07"
Oct 08 07:36:21 crc kubenswrapper[4693]: E1008 07:36:21.113698 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07\": container with ID starting with f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07 not found: ID does not exist" containerID="f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.113713 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07"} err="failed to get container status \"f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07\": rpc error: code = NotFound desc = could not find container \"f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07\": container with ID starting with f4b0940acc9401f9428e147bede0da8f22a63e3db74bdc97620a8b428d92bd07 not found: ID does not exist"
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.135403 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-ceilometer-tls-certs\") pod \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") "
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.135447 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-sg-core-conf-yaml\") pod \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") "
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.136179 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-combined-ca-bundle\") pod \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") "
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.136259 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-log-httpd\") pod \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") "
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.136304 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-run-httpd\") pod \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") "
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.136570 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-config-data\") pod \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") "
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.136702 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-scripts\") pod \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") "
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.136919 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ef71f046-a392-4737-a4f8-e7a5ffdf66a6" (UID: "ef71f046-a392-4737-a4f8-e7a5ffdf66a6"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.137073 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ef71f046-a392-4737-a4f8-e7a5ffdf66a6" (UID: "ef71f046-a392-4737-a4f8-e7a5ffdf66a6"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.137182 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltfrs\" (UniqueName: \"kubernetes.io/projected/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-kube-api-access-ltfrs\") pod \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\" (UID: \"ef71f046-a392-4737-a4f8-e7a5ffdf66a6\") "
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.137641 4693 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.137656 4693 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.141527 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-kube-api-access-ltfrs" (OuterVolumeSpecName: "kube-api-access-ltfrs") pod "ef71f046-a392-4737-a4f8-e7a5ffdf66a6" (UID: "ef71f046-a392-4737-a4f8-e7a5ffdf66a6"). InnerVolumeSpecName "kube-api-access-ltfrs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.142182 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-scripts" (OuterVolumeSpecName: "scripts") pod "ef71f046-a392-4737-a4f8-e7a5ffdf66a6" (UID: "ef71f046-a392-4737-a4f8-e7a5ffdf66a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.180540 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ef71f046-a392-4737-a4f8-e7a5ffdf66a6" (UID: "ef71f046-a392-4737-a4f8-e7a5ffdf66a6"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.223512 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "ef71f046-a392-4737-a4f8-e7a5ffdf66a6" (UID: "ef71f046-a392-4737-a4f8-e7a5ffdf66a6"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.239048 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-scripts\") on node \"crc\" DevicePath \"\""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.239083 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltfrs\" (UniqueName: \"kubernetes.io/projected/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-kube-api-access-ltfrs\") on node \"crc\" DevicePath \"\""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.239096 4693 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.239104 4693 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.253607 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-config-data" (OuterVolumeSpecName: "config-data") pod "ef71f046-a392-4737-a4f8-e7a5ffdf66a6" (UID: "ef71f046-a392-4737-a4f8-e7a5ffdf66a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.263763 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef71f046-a392-4737-a4f8-e7a5ffdf66a6" (UID: "ef71f046-a392-4737-a4f8-e7a5ffdf66a6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.340451 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.340494 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef71f046-a392-4737-a4f8-e7a5ffdf66a6-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.355413 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.380931 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.385233 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:36:21 crc kubenswrapper[4693]: E1008 07:36:21.385738 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="proxy-httpd" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.385759 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="proxy-httpd" Oct 08 07:36:21 crc kubenswrapper[4693]: E1008 07:36:21.385773 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="ceilometer-notification-agent" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.385783 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="ceilometer-notification-agent" Oct 08 07:36:21 crc kubenswrapper[4693]: E1008 07:36:21.385822 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="ceilometer-central-agent" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.385832 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="ceilometer-central-agent" Oct 08 07:36:21 crc kubenswrapper[4693]: E1008 07:36:21.385862 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="sg-core" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.385870 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="sg-core" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.386121 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="ceilometer-notification-agent" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.386151 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="sg-core" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.386166 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="proxy-httpd" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.386179 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" containerName="ceilometer-central-agent" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.388328 4693 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.390962 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.392169 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.392353 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.392684 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.544122 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.544194 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-config-data\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.544219 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/68fc1107-d02a-4138-bd74-648778e9302d-run-httpd\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.544393 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kshvz\" (UniqueName: \"kubernetes.io/projected/68fc1107-d02a-4138-bd74-648778e9302d-kube-api-access-kshvz\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.544516 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.544568 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/68fc1107-d02a-4138-bd74-648778e9302d-log-httpd\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.544766 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-scripts\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.544900 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.646585 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-scripts\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.646645 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.646686 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.646719 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-config-data\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.646740 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/68fc1107-d02a-4138-bd74-648778e9302d-run-httpd\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.646766 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kshvz\" (UniqueName: \"kubernetes.io/projected/68fc1107-d02a-4138-bd74-648778e9302d-kube-api-access-kshvz\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.646798 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.646831 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/68fc1107-d02a-4138-bd74-648778e9302d-log-httpd\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.647206 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/68fc1107-d02a-4138-bd74-648778e9302d-log-httpd\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.647414 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/68fc1107-d02a-4138-bd74-648778e9302d-run-httpd\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.654595 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.654606 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-config-data\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.654669 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.654613 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-scripts\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.655081 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68fc1107-d02a-4138-bd74-648778e9302d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.664111 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kshvz\" (UniqueName: \"kubernetes.io/projected/68fc1107-d02a-4138-bd74-648778e9302d-kube-api-access-kshvz\") pod \"ceilometer-0\" (UID: \"68fc1107-d02a-4138-bd74-648778e9302d\") " pod="openstack/ceilometer-0" Oct 08 07:36:21 crc kubenswrapper[4693]: I1008 07:36:21.740530 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 08 07:36:22 crc kubenswrapper[4693]: I1008 07:36:22.241626 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 08 07:36:23 crc kubenswrapper[4693]: I1008 07:36:23.042414 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"68fc1107-d02a-4138-bd74-648778e9302d","Type":"ContainerStarted","Data":"7ee401010effbfd26733ea90c15894866e2229dea13a862346a21d374ff72d93"} Oct 08 07:36:23 crc kubenswrapper[4693]: I1008 07:36:23.042708 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"68fc1107-d02a-4138-bd74-648778e9302d","Type":"ContainerStarted","Data":"847b01933bcb5810302dbbbe3df91fe733d7d4438d9cc8559c6040c439bd8888"} Oct 08 07:36:23 crc kubenswrapper[4693]: I1008 07:36:23.372734 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef71f046-a392-4737-a4f8-e7a5ffdf66a6" path="/var/lib/kubelet/pods/ef71f046-a392-4737-a4f8-e7a5ffdf66a6/volumes" Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.051327 4693 generic.go:334] "Generic (PLEG): container finished" podID="b3037b33-df0b-41b2-8a21-82b16d98ceea" containerID="2187123fe1df6fb7f983953ce88dfbd095410762cee218bbf0a5d2748c7c5972" exitCode=0 Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.051895 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b3037b33-df0b-41b2-8a21-82b16d98ceea","Type":"ContainerDied","Data":"2187123fe1df6fb7f983953ce88dfbd095410762cee218bbf0a5d2748c7c5972"} Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.052018 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b3037b33-df0b-41b2-8a21-82b16d98ceea","Type":"ContainerDied","Data":"ef138bc8d4c4a0549ac8c6a2ccfdde64cb456d3d90029550321c8acd828c96a6"} Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.052072 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef138bc8d4c4a0549ac8c6a2ccfdde64cb456d3d90029550321c8acd828c96a6" Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.067696 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"68fc1107-d02a-4138-bd74-648778e9302d","Type":"ContainerStarted","Data":"d57fbc9a4d49e606363f4cc8390ec46b9a8bcd697533538c92526cfc6517a9fa"} Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.090140 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.196900 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9s52\" (UniqueName: \"kubernetes.io/projected/b3037b33-df0b-41b2-8a21-82b16d98ceea-kube-api-access-z9s52\") pod \"b3037b33-df0b-41b2-8a21-82b16d98ceea\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.197094 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3037b33-df0b-41b2-8a21-82b16d98ceea-combined-ca-bundle\") pod \"b3037b33-df0b-41b2-8a21-82b16d98ceea\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.197124 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3037b33-df0b-41b2-8a21-82b16d98ceea-logs\") pod \"b3037b33-df0b-41b2-8a21-82b16d98ceea\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.197210 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3037b33-df0b-41b2-8a21-82b16d98ceea-config-data\") pod \"b3037b33-df0b-41b2-8a21-82b16d98ceea\" (UID: \"b3037b33-df0b-41b2-8a21-82b16d98ceea\") " Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.198565 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3037b33-df0b-41b2-8a21-82b16d98ceea-logs" (OuterVolumeSpecName: "logs") pod "b3037b33-df0b-41b2-8a21-82b16d98ceea" (UID: "b3037b33-df0b-41b2-8a21-82b16d98ceea"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.205380 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3037b33-df0b-41b2-8a21-82b16d98ceea-kube-api-access-z9s52" (OuterVolumeSpecName: "kube-api-access-z9s52") pod "b3037b33-df0b-41b2-8a21-82b16d98ceea" (UID: "b3037b33-df0b-41b2-8a21-82b16d98ceea"). InnerVolumeSpecName "kube-api-access-z9s52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.227150 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3037b33-df0b-41b2-8a21-82b16d98ceea-config-data" (OuterVolumeSpecName: "config-data") pod "b3037b33-df0b-41b2-8a21-82b16d98ceea" (UID: "b3037b33-df0b-41b2-8a21-82b16d98ceea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.248514 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3037b33-df0b-41b2-8a21-82b16d98ceea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3037b33-df0b-41b2-8a21-82b16d98ceea" (UID: "b3037b33-df0b-41b2-8a21-82b16d98ceea"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.299611 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3037b33-df0b-41b2-8a21-82b16d98ceea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.299645 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3037b33-df0b-41b2-8a21-82b16d98ceea-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.299654 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3037b33-df0b-41b2-8a21-82b16d98ceea-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.299663 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9s52\" (UniqueName: \"kubernetes.io/projected/b3037b33-df0b-41b2-8a21-82b16d98ceea-kube-api-access-z9s52\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.671085 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:36:24 crc kubenswrapper[4693]: I1008 07:36:24.685211 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.084462 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"68fc1107-d02a-4138-bd74-648778e9302d","Type":"ContainerStarted","Data":"9baacc39151b7bd6105f4a496e31698119e02e75d55a4626afcc02219677b477"} Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.084521 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.117555 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.139889 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.159319 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.185510 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 08 07:36:25 crc kubenswrapper[4693]: E1008 07:36:25.186195 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3037b33-df0b-41b2-8a21-82b16d98ceea" containerName="nova-api-log" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.186218 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3037b33-df0b-41b2-8a21-82b16d98ceea" containerName="nova-api-log" Oct 08 07:36:25 crc kubenswrapper[4693]: E1008 07:36:25.186254 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3037b33-df0b-41b2-8a21-82b16d98ceea" containerName="nova-api-api" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.186267 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3037b33-df0b-41b2-8a21-82b16d98ceea" containerName="nova-api-api" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.186598 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3037b33-df0b-41b2-8a21-82b16d98ceea" containerName="nova-api-api" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.186634 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3037b33-df0b-41b2-8a21-82b16d98ceea" containerName="nova-api-log" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.188086 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.194749 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.195134 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.195331 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.195951 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.317994 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/270aaa79-877c-4a9e-8503-282dafc036d5-logs\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.318383 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-config-data\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.318475 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.318515 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.318558 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-public-tls-certs\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.318765 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66dsv\" (UniqueName: \"kubernetes.io/projected/270aaa79-877c-4a9e-8503-282dafc036d5-kube-api-access-66dsv\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.381796 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3037b33-df0b-41b2-8a21-82b16d98ceea" path="/var/lib/kubelet/pods/b3037b33-df0b-41b2-8a21-82b16d98ceea/volumes" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.383958 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-mtk9c"] Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.385415 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.388939 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.389054 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.402746 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-mtk9c"] Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.421146 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-public-tls-certs\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.421283 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66dsv\" (UniqueName: \"kubernetes.io/projected/270aaa79-877c-4a9e-8503-282dafc036d5-kube-api-access-66dsv\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.421399 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/270aaa79-877c-4a9e-8503-282dafc036d5-logs\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.421493 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-config-data\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.421527 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.421554 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.422268 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/270aaa79-877c-4a9e-8503-282dafc036d5-logs\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.427934 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.434552 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.439638 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-public-tls-certs\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.442516 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66dsv\" (UniqueName: \"kubernetes.io/projected/270aaa79-877c-4a9e-8503-282dafc036d5-kube-api-access-66dsv\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.445991 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-config-data\") pod \"nova-api-0\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.512934 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.523665 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-config-data\") pod \"nova-cell1-cell-mapping-mtk9c\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.523894 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-mtk9c\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.524178 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-scripts\") pod \"nova-cell1-cell-mapping-mtk9c\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.524216 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdlwr\" (UniqueName: \"kubernetes.io/projected/8a7c286f-f170-49ca-a7e6-9e4c411d4840-kube-api-access-wdlwr\") pod \"nova-cell1-cell-mapping-mtk9c\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.626837 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdlwr\" (UniqueName: \"kubernetes.io/projected/8a7c286f-f170-49ca-a7e6-9e4c411d4840-kube-api-access-wdlwr\") pod \"nova-cell1-cell-mapping-mtk9c\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.627180 4693 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-config-data\") pod \"nova-cell1-cell-mapping-mtk9c\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.627275 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-mtk9c\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.627374 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-scripts\") pod \"nova-cell1-cell-mapping-mtk9c\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.641453 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-scripts\") pod \"nova-cell1-cell-mapping-mtk9c\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.650393 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-mtk9c\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.651948 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdlwr\" (UniqueName: \"kubernetes.io/projected/8a7c286f-f170-49ca-a7e6-9e4c411d4840-kube-api-access-wdlwr\") pod \"nova-cell1-cell-mapping-mtk9c\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.669060 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-config-data\") pod \"nova-cell1-cell-mapping-mtk9c\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.721351 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:25 crc kubenswrapper[4693]: I1008 07:36:25.989663 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:36:25 crc kubenswrapper[4693]: W1008 07:36:25.991503 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod270aaa79_877c_4a9e_8503_282dafc036d5.slice/crio-0b04d40f025eae3fa8edfdc19f45deeaef96c449f5064ff2054d2582791fe1bf WatchSource:0}: Error finding container 0b04d40f025eae3fa8edfdc19f45deeaef96c449f5064ff2054d2582791fe1bf: Status 404 returned error can't find the container with id 0b04d40f025eae3fa8edfdc19f45deeaef96c449f5064ff2054d2582791fe1bf Oct 08 07:36:26 crc kubenswrapper[4693]: I1008 07:36:26.099801 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"68fc1107-d02a-4138-bd74-648778e9302d","Type":"ContainerStarted","Data":"a87bc3e26e305f136c1d955a4addd986165d5d5c08dd87998bc8fc792f51c050"} Oct 08 07:36:26 crc kubenswrapper[4693]: I1008 07:36:26.099949 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 08 07:36:26 crc kubenswrapper[4693]: I1008 07:36:26.102285 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"270aaa79-877c-4a9e-8503-282dafc036d5","Type":"ContainerStarted","Data":"0b04d40f025eae3fa8edfdc19f45deeaef96c449f5064ff2054d2582791fe1bf"} Oct 08 07:36:26 crc kubenswrapper[4693]: I1008 07:36:26.131244 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.584674479 podStartE2EDuration="5.131222312s" podCreationTimestamp="2025-10-08 07:36:21 +0000 UTC" firstStartedPulling="2025-10-08 07:36:22.239061343 +0000 UTC m=+1167.610026318" lastFinishedPulling="2025-10-08 07:36:25.785609206 +0000 UTC m=+1171.156574151" observedRunningTime="2025-10-08 07:36:26.121796972 +0000 UTC m=+1171.492761897" watchObservedRunningTime="2025-10-08 07:36:26.131222312 +0000 UTC m=+1171.502187257" Oct 08 07:36:26 crc kubenswrapper[4693]: I1008 07:36:26.211257 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-mtk9c"] Oct 08 07:36:26 crc kubenswrapper[4693]: W1008 07:36:26.215175 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a7c286f_f170_49ca_a7e6_9e4c411d4840.slice/crio-89877cd7a915eb10f15e41d1ef75ea2c7e8034b5fc0a4b0c7324353e67a619d0 WatchSource:0}: Error finding container 89877cd7a915eb10f15e41d1ef75ea2c7e8034b5fc0a4b0c7324353e67a619d0: Status 404 returned error can't find the container with id 89877cd7a915eb10f15e41d1ef75ea2c7e8034b5fc0a4b0c7324353e67a619d0 Oct 08 07:36:27 crc kubenswrapper[4693]: I1008 07:36:27.115008 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-mtk9c" event={"ID":"8a7c286f-f170-49ca-a7e6-9e4c411d4840","Type":"ContainerStarted","Data":"cef8afda4aec447b6f3834ebd155e491e040a40f584e5d1a87dbb6d1624c6eb2"} Oct 08 07:36:27 crc kubenswrapper[4693]: I1008 07:36:27.115360 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-mtk9c" event={"ID":"8a7c286f-f170-49ca-a7e6-9e4c411d4840","Type":"ContainerStarted","Data":"89877cd7a915eb10f15e41d1ef75ea2c7e8034b5fc0a4b0c7324353e67a619d0"} Oct 08 07:36:27 crc kubenswrapper[4693]: I1008 07:36:27.120655 4693 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"270aaa79-877c-4a9e-8503-282dafc036d5","Type":"ContainerStarted","Data":"42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557"} Oct 08 07:36:27 crc kubenswrapper[4693]: I1008 07:36:27.120722 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"270aaa79-877c-4a9e-8503-282dafc036d5","Type":"ContainerStarted","Data":"c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd"} Oct 08 07:36:27 crc kubenswrapper[4693]: I1008 07:36:27.151086 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-mtk9c" podStartSLOduration=2.15105921 podStartE2EDuration="2.15105921s" podCreationTimestamp="2025-10-08 07:36:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:36:27.14201093 +0000 UTC m=+1172.512975925" watchObservedRunningTime="2025-10-08 07:36:27.15105921 +0000 UTC m=+1172.522024175" Oct 08 07:36:27 crc kubenswrapper[4693]: I1008 07:36:27.614012 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl" Oct 08 07:36:27 crc kubenswrapper[4693]: I1008 07:36:27.637131 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.637108793 podStartE2EDuration="2.637108793s" podCreationTimestamp="2025-10-08 07:36:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:36:27.171853438 +0000 UTC m=+1172.542818433" watchObservedRunningTime="2025-10-08 07:36:27.637108793 +0000 UTC m=+1173.008073728" Oct 08 07:36:27 crc kubenswrapper[4693]: I1008 07:36:27.693423 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-s92d2"] Oct 08 07:36:27 crc kubenswrapper[4693]: I1008 07:36:27.694388 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" podUID="15bacf5c-3c15-48c3-9c6c-478bd9e5d599" containerName="dnsmasq-dns" containerID="cri-o://58dc5fcdcafbdc23a9631b266d6eb65688b3011c23f7ebc8c76fdfcfe67b74ab" gracePeriod=10 Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.140071 4693 generic.go:334] "Generic (PLEG): container finished" podID="15bacf5c-3c15-48c3-9c6c-478bd9e5d599" containerID="58dc5fcdcafbdc23a9631b266d6eb65688b3011c23f7ebc8c76fdfcfe67b74ab" exitCode=0 Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.140160 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" event={"ID":"15bacf5c-3c15-48c3-9c6c-478bd9e5d599","Type":"ContainerDied","Data":"58dc5fcdcafbdc23a9631b266d6eb65688b3011c23f7ebc8c76fdfcfe67b74ab"} Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.218141 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.277174 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-dns-swift-storage-0\") pod \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.277214 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-ovsdbserver-nb\") pod \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.277249 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-ovsdbserver-sb\") pod \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.277267 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfgk2\" (UniqueName: \"kubernetes.io/projected/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-kube-api-access-dfgk2\") pod \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.277375 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-dns-svc\") pod \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.277455 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-config\") pod \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\" (UID: \"15bacf5c-3c15-48c3-9c6c-478bd9e5d599\") " Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.284129 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-kube-api-access-dfgk2" (OuterVolumeSpecName: "kube-api-access-dfgk2") pod "15bacf5c-3c15-48c3-9c6c-478bd9e5d599" (UID: "15bacf5c-3c15-48c3-9c6c-478bd9e5d599"). InnerVolumeSpecName "kube-api-access-dfgk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.344286 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "15bacf5c-3c15-48c3-9c6c-478bd9e5d599" (UID: "15bacf5c-3c15-48c3-9c6c-478bd9e5d599"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.345438 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-config" (OuterVolumeSpecName: "config") pod "15bacf5c-3c15-48c3-9c6c-478bd9e5d599" (UID: "15bacf5c-3c15-48c3-9c6c-478bd9e5d599"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.351929 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "15bacf5c-3c15-48c3-9c6c-478bd9e5d599" (UID: "15bacf5c-3c15-48c3-9c6c-478bd9e5d599"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.355899 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "15bacf5c-3c15-48c3-9c6c-478bd9e5d599" (UID: "15bacf5c-3c15-48c3-9c6c-478bd9e5d599"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.367387 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "15bacf5c-3c15-48c3-9c6c-478bd9e5d599" (UID: "15bacf5c-3c15-48c3-9c6c-478bd9e5d599"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.379317 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.379345 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.379355 4693 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.379366 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.379375 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:28 crc kubenswrapper[4693]: I1008 07:36:28.379384 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfgk2\" (UniqueName: \"kubernetes.io/projected/15bacf5c-3c15-48c3-9c6c-478bd9e5d599-kube-api-access-dfgk2\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:29 crc kubenswrapper[4693]: I1008 07:36:29.165886 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" event={"ID":"15bacf5c-3c15-48c3-9c6c-478bd9e5d599","Type":"ContainerDied","Data":"44fd18740f24164332bed42b1f1072a4557bc3f054870d313d04650719534754"} Oct 08 07:36:29 crc kubenswrapper[4693]: I1008 07:36:29.165941 4693 scope.go:117] "RemoveContainer" containerID="58dc5fcdcafbdc23a9631b266d6eb65688b3011c23f7ebc8c76fdfcfe67b74ab" Oct 08 07:36:29 crc kubenswrapper[4693]: I1008 07:36:29.166068 
4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-s92d2" Oct 08 07:36:29 crc kubenswrapper[4693]: I1008 07:36:29.210309 4693 scope.go:117] "RemoveContainer" containerID="cea13c521cc757b8866c612a37cc18e1619ac54e843be832de860c6b2e0b6114" Oct 08 07:36:29 crc kubenswrapper[4693]: I1008 07:36:29.210343 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-s92d2"] Oct 08 07:36:29 crc kubenswrapper[4693]: I1008 07:36:29.221617 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-s92d2"] Oct 08 07:36:29 crc kubenswrapper[4693]: I1008 07:36:29.379345 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15bacf5c-3c15-48c3-9c6c-478bd9e5d599" path="/var/lib/kubelet/pods/15bacf5c-3c15-48c3-9c6c-478bd9e5d599/volumes" Oct 08 07:36:31 crc kubenswrapper[4693]: I1008 07:36:31.200746 4693 generic.go:334] "Generic (PLEG): container finished" podID="8a7c286f-f170-49ca-a7e6-9e4c411d4840" containerID="cef8afda4aec447b6f3834ebd155e491e040a40f584e5d1a87dbb6d1624c6eb2" exitCode=0 Oct 08 07:36:31 crc kubenswrapper[4693]: I1008 07:36:31.202005 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-mtk9c" event={"ID":"8a7c286f-f170-49ca-a7e6-9e4c411d4840","Type":"ContainerDied","Data":"cef8afda4aec447b6f3834ebd155e491e040a40f584e5d1a87dbb6d1624c6eb2"} Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.501871 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.566236 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-scripts\") pod \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.566756 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-config-data\") pod \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.566780 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdlwr\" (UniqueName: \"kubernetes.io/projected/8a7c286f-f170-49ca-a7e6-9e4c411d4840-kube-api-access-wdlwr\") pod \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.566989 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-combined-ca-bundle\") pod \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\" (UID: \"8a7c286f-f170-49ca-a7e6-9e4c411d4840\") " Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.572334 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a7c286f-f170-49ca-a7e6-9e4c411d4840-kube-api-access-wdlwr" (OuterVolumeSpecName: "kube-api-access-wdlwr") pod "8a7c286f-f170-49ca-a7e6-9e4c411d4840" (UID: "8a7c286f-f170-49ca-a7e6-9e4c411d4840"). InnerVolumeSpecName "kube-api-access-wdlwr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.573747 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-scripts" (OuterVolumeSpecName: "scripts") pod "8a7c286f-f170-49ca-a7e6-9e4c411d4840" (UID: "8a7c286f-f170-49ca-a7e6-9e4c411d4840"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.600888 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a7c286f-f170-49ca-a7e6-9e4c411d4840" (UID: "8a7c286f-f170-49ca-a7e6-9e4c411d4840"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.616838 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-config-data" (OuterVolumeSpecName: "config-data") pod "8a7c286f-f170-49ca-a7e6-9e4c411d4840" (UID: "8a7c286f-f170-49ca-a7e6-9e4c411d4840"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.668882 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.668913 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdlwr\" (UniqueName: \"kubernetes.io/projected/8a7c286f-f170-49ca-a7e6-9e4c411d4840-kube-api-access-wdlwr\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.668925 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:32 crc kubenswrapper[4693]: I1008 07:36:32.668933 4693 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a7c286f-f170-49ca-a7e6-9e4c411d4840-scripts\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:33 crc kubenswrapper[4693]: I1008 07:36:33.221985 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-mtk9c" event={"ID":"8a7c286f-f170-49ca-a7e6-9e4c411d4840","Type":"ContainerDied","Data":"89877cd7a915eb10f15e41d1ef75ea2c7e8034b5fc0a4b0c7324353e67a619d0"} Oct 08 07:36:33 crc kubenswrapper[4693]: I1008 07:36:33.222032 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89877cd7a915eb10f15e41d1ef75ea2c7e8034b5fc0a4b0c7324353e67a619d0" Oct 08 07:36:33 crc kubenswrapper[4693]: I1008 07:36:33.222035 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-mtk9c" Oct 08 07:36:33 crc kubenswrapper[4693]: I1008 07:36:33.425752 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:36:33 crc kubenswrapper[4693]: I1008 07:36:33.426234 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="270aaa79-877c-4a9e-8503-282dafc036d5" containerName="nova-api-log" containerID="cri-o://c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd" gracePeriod=30 Oct 08 07:36:33 crc kubenswrapper[4693]: I1008 07:36:33.426879 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="270aaa79-877c-4a9e-8503-282dafc036d5" containerName="nova-api-api" containerID="cri-o://42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557" gracePeriod=30 Oct 08 07:36:33 crc kubenswrapper[4693]: I1008 07:36:33.437197 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:36:33 crc kubenswrapper[4693]: I1008 07:36:33.437488 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c9e121a7-a4c1-4813-a55f-c0579fa72459" containerName="nova-scheduler-scheduler" containerID="cri-o://8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6" gracePeriod=30 Oct 08 07:36:33 crc kubenswrapper[4693]: I1008 07:36:33.445522 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:36:33 crc kubenswrapper[4693]: I1008 07:36:33.445996 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerName="nova-metadata-metadata" containerID="cri-o://b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff" gracePeriod=30 Oct 08 07:36:33 crc kubenswrapper[4693]: I1008 07:36:33.445782 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerName="nova-metadata-log" containerID="cri-o://fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63" gracePeriod=30 Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.114726 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.202302 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66dsv\" (UniqueName: \"kubernetes.io/projected/270aaa79-877c-4a9e-8503-282dafc036d5-kube-api-access-66dsv\") pod \"270aaa79-877c-4a9e-8503-282dafc036d5\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.202356 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-public-tls-certs\") pod \"270aaa79-877c-4a9e-8503-282dafc036d5\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.202403 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-combined-ca-bundle\") pod \"270aaa79-877c-4a9e-8503-282dafc036d5\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.202460 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-config-data\") pod \"270aaa79-877c-4a9e-8503-282dafc036d5\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.202510 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-internal-tls-certs\") pod \"270aaa79-877c-4a9e-8503-282dafc036d5\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.202543 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/270aaa79-877c-4a9e-8503-282dafc036d5-logs\") pod \"270aaa79-877c-4a9e-8503-282dafc036d5\" (UID: \"270aaa79-877c-4a9e-8503-282dafc036d5\") " Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.203210 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/270aaa79-877c-4a9e-8503-282dafc036d5-logs" (OuterVolumeSpecName: "logs") pod "270aaa79-877c-4a9e-8503-282dafc036d5" (UID: "270aaa79-877c-4a9e-8503-282dafc036d5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.207006 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/270aaa79-877c-4a9e-8503-282dafc036d5-kube-api-access-66dsv" (OuterVolumeSpecName: "kube-api-access-66dsv") pod "270aaa79-877c-4a9e-8503-282dafc036d5" (UID: "270aaa79-877c-4a9e-8503-282dafc036d5"). InnerVolumeSpecName "kube-api-access-66dsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.239138 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-config-data" (OuterVolumeSpecName: "config-data") pod "270aaa79-877c-4a9e-8503-282dafc036d5" (UID: "270aaa79-877c-4a9e-8503-282dafc036d5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.244656 4693 generic.go:334] "Generic (PLEG): container finished" podID="270aaa79-877c-4a9e-8503-282dafc036d5" containerID="42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557" exitCode=0 Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.244691 4693 generic.go:334] "Generic (PLEG): container finished" podID="270aaa79-877c-4a9e-8503-282dafc036d5" containerID="c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd" exitCode=143 Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.244742 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"270aaa79-877c-4a9e-8503-282dafc036d5","Type":"ContainerDied","Data":"42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557"} Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.244775 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"270aaa79-877c-4a9e-8503-282dafc036d5","Type":"ContainerDied","Data":"c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd"} Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.244788 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"270aaa79-877c-4a9e-8503-282dafc036d5","Type":"ContainerDied","Data":"0b04d40f025eae3fa8edfdc19f45deeaef96c449f5064ff2054d2582791fe1bf"} Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.244804 4693 scope.go:117] "RemoveContainer" containerID="42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.244947 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.250400 4693 generic.go:334] "Generic (PLEG): container finished" podID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerID="fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63" exitCode=143 Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.250464 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"065b63c2-611d-4461-bc50-4cfb9e120bba","Type":"ContainerDied","Data":"fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63"} Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.260577 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "270aaa79-877c-4a9e-8503-282dafc036d5" (UID: "270aaa79-877c-4a9e-8503-282dafc036d5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.270516 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "270aaa79-877c-4a9e-8503-282dafc036d5" (UID: "270aaa79-877c-4a9e-8503-282dafc036d5"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.271309 4693 scope.go:117] "RemoveContainer" containerID="c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.288015 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "270aaa79-877c-4a9e-8503-282dafc036d5" (UID: "270aaa79-877c-4a9e-8503-282dafc036d5"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.299510 4693 scope.go:117] "RemoveContainer" containerID="42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557" Oct 08 07:36:34 crc kubenswrapper[4693]: E1008 07:36:34.299995 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557\": container with ID starting with 42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557 not found: ID does not exist" containerID="42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.300137 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557"} err="failed to get container status \"42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557\": rpc error: code = NotFound desc = could not find container \"42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557\": container with ID starting with 42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557 not found: ID does not exist" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.300248 4693 scope.go:117] "RemoveContainer" containerID="c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd" Oct 08 07:36:34 crc kubenswrapper[4693]: E1008 07:36:34.300646 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd\": container with ID starting with c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd not found: ID does not exist" containerID="c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.300702 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd"} err="failed to get container status \"c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd\": rpc error: code = NotFound desc = could not find container \"c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd\": container with ID starting with c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd not found: ID does not exist" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.300729 4693 scope.go:117] "RemoveContainer" containerID="42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.301086 4693 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557"} err="failed to get container status \"42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557\": rpc error: code = NotFound desc = could not find container \"42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557\": container with ID starting with 42749746ce4182441debb8daca365589e8adb7dd5ef3844f302a3e076f102557 not found: ID does not exist" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.301137 4693 scope.go:117] "RemoveContainer" containerID="c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.301359 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd"} err="failed to get container status \"c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd\": rpc error: code = NotFound desc = could not find container \"c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd\": container with ID starting with c838e1eae51e705dd652fc15323ae80a0fc85d71754b40b2dee7bccf4fabe9dd not found: ID does not exist" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.304080 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66dsv\" (UniqueName: \"kubernetes.io/projected/270aaa79-877c-4a9e-8503-282dafc036d5-kube-api-access-66dsv\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.304108 4693 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.304120 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.304133 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.304147 4693 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/270aaa79-877c-4a9e-8503-282dafc036d5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.304158 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/270aaa79-877c-4a9e-8503-282dafc036d5-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.611207 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.620853 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.630963 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 08 07:36:34 crc kubenswrapper[4693]: E1008 07:36:34.631377 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15bacf5c-3c15-48c3-9c6c-478bd9e5d599" containerName="init" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.631401 4693 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="15bacf5c-3c15-48c3-9c6c-478bd9e5d599" containerName="init" Oct 08 07:36:34 crc kubenswrapper[4693]: E1008 07:36:34.631420 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a7c286f-f170-49ca-a7e6-9e4c411d4840" containerName="nova-manage" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.631427 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a7c286f-f170-49ca-a7e6-9e4c411d4840" containerName="nova-manage" Oct 08 07:36:34 crc kubenswrapper[4693]: E1008 07:36:34.631445 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="270aaa79-877c-4a9e-8503-282dafc036d5" containerName="nova-api-api" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.631452 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="270aaa79-877c-4a9e-8503-282dafc036d5" containerName="nova-api-api" Oct 08 07:36:34 crc kubenswrapper[4693]: E1008 07:36:34.631468 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15bacf5c-3c15-48c3-9c6c-478bd9e5d599" containerName="dnsmasq-dns" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.631475 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="15bacf5c-3c15-48c3-9c6c-478bd9e5d599" containerName="dnsmasq-dns" Oct 08 07:36:34 crc kubenswrapper[4693]: E1008 07:36:34.631518 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="270aaa79-877c-4a9e-8503-282dafc036d5" containerName="nova-api-log" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.631528 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="270aaa79-877c-4a9e-8503-282dafc036d5" containerName="nova-api-log" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.631740 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a7c286f-f170-49ca-a7e6-9e4c411d4840" containerName="nova-manage" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.631759 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="270aaa79-877c-4a9e-8503-282dafc036d5" containerName="nova-api-api" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.631780 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="15bacf5c-3c15-48c3-9c6c-478bd9e5d599" containerName="dnsmasq-dns" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.631802 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="270aaa79-877c-4a9e-8503-282dafc036d5" containerName="nova-api-log" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.632891 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.635292 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.635483 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.635633 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.652288 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.711640 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/106dc2ce-316f-4e4e-a87c-ada5021fea4b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.711912 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/106dc2ce-316f-4e4e-a87c-ada5021fea4b-public-tls-certs\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.711998 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/106dc2ce-316f-4e4e-a87c-ada5021fea4b-config-data\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.712156 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/106dc2ce-316f-4e4e-a87c-ada5021fea4b-logs\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.712280 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/106dc2ce-316f-4e4e-a87c-ada5021fea4b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.712568 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlvww\" (UniqueName: \"kubernetes.io/projected/106dc2ce-316f-4e4e-a87c-ada5021fea4b-kube-api-access-dlvww\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.813906 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/106dc2ce-316f-4e4e-a87c-ada5021fea4b-logs\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.814153 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/106dc2ce-316f-4e4e-a87c-ada5021fea4b-internal-tls-certs\") pod \"nova-api-0\" (UID: 
\"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.814268 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/106dc2ce-316f-4e4e-a87c-ada5021fea4b-logs\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.814402 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlvww\" (UniqueName: \"kubernetes.io/projected/106dc2ce-316f-4e4e-a87c-ada5021fea4b-kube-api-access-dlvww\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.814543 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/106dc2ce-316f-4e4e-a87c-ada5021fea4b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.814666 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/106dc2ce-316f-4e4e-a87c-ada5021fea4b-public-tls-certs\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.814793 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/106dc2ce-316f-4e4e-a87c-ada5021fea4b-config-data\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.818226 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/106dc2ce-316f-4e4e-a87c-ada5021fea4b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.819516 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/106dc2ce-316f-4e4e-a87c-ada5021fea4b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.820193 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/106dc2ce-316f-4e4e-a87c-ada5021fea4b-config-data\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.829462 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/106dc2ce-316f-4e4e-a87c-ada5021fea4b-public-tls-certs\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.833411 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlvww\" (UniqueName: \"kubernetes.io/projected/106dc2ce-316f-4e4e-a87c-ada5021fea4b-kube-api-access-dlvww\") pod \"nova-api-0\" (UID: \"106dc2ce-316f-4e4e-a87c-ada5021fea4b\") " 
pod="openstack/nova-api-0" Oct 08 07:36:34 crc kubenswrapper[4693]: I1008 07:36:34.949074 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 08 07:36:35 crc kubenswrapper[4693]: E1008 07:36:35.009509 4693 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 08 07:36:35 crc kubenswrapper[4693]: E1008 07:36:35.011500 4693 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 08 07:36:35 crc kubenswrapper[4693]: E1008 07:36:35.013705 4693 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 08 07:36:35 crc kubenswrapper[4693]: E1008 07:36:35.013793 4693 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="c9e121a7-a4c1-4813-a55f-c0579fa72459" containerName="nova-scheduler-scheduler" Oct 08 07:36:35 crc kubenswrapper[4693]: I1008 07:36:35.381710 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="270aaa79-877c-4a9e-8503-282dafc036d5" path="/var/lib/kubelet/pods/270aaa79-877c-4a9e-8503-282dafc036d5/volumes" Oct 08 07:36:35 crc kubenswrapper[4693]: I1008 07:36:35.456021 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 08 07:36:35 crc kubenswrapper[4693]: W1008 07:36:35.461715 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod106dc2ce_316f_4e4e_a87c_ada5021fea4b.slice/crio-031c002b3c7c660a34bf87d0a723cb6fc86fcf091a4c31e68fc32eb87be2d628 WatchSource:0}: Error finding container 031c002b3c7c660a34bf87d0a723cb6fc86fcf091a4c31e68fc32eb87be2d628: Status 404 returned error can't find the container with id 031c002b3c7c660a34bf87d0a723cb6fc86fcf091a4c31e68fc32eb87be2d628 Oct 08 07:36:36 crc kubenswrapper[4693]: I1008 07:36:36.272877 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"106dc2ce-316f-4e4e-a87c-ada5021fea4b","Type":"ContainerStarted","Data":"b97bbcf13c7b4bbf08d09d755e04fc1f11513fe81f03553b36ddc0d8c47cffa3"} Oct 08 07:36:36 crc kubenswrapper[4693]: I1008 07:36:36.273353 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"106dc2ce-316f-4e4e-a87c-ada5021fea4b","Type":"ContainerStarted","Data":"f8c97436ba1f9169830beb679a6221379c0eda05b8c54f8e2387f28d40dc4343"} Oct 08 07:36:36 crc kubenswrapper[4693]: I1008 07:36:36.273432 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"106dc2ce-316f-4e4e-a87c-ada5021fea4b","Type":"ContainerStarted","Data":"031c002b3c7c660a34bf87d0a723cb6fc86fcf091a4c31e68fc32eb87be2d628"} Oct 08 07:36:36 crc kubenswrapper[4693]: I1008 07:36:36.305569 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.305540175 podStartE2EDuration="2.305540175s" podCreationTimestamp="2025-10-08 07:36:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:36:36.297103681 +0000 UTC m=+1181.668068626" watchObservedRunningTime="2025-10-08 07:36:36.305540175 +0000 UTC m=+1181.676505110" Oct 08 07:36:36 crc kubenswrapper[4693]: I1008 07:36:36.583773 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.192:8775/\": read tcp 10.217.0.2:60174->10.217.0.192:8775: read: connection reset by peer" Oct 08 07:36:36 crc kubenswrapper[4693]: I1008 07:36:36.583863 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.192:8775/\": read tcp 10.217.0.2:60170->10.217.0.192:8775: read: connection reset by peer" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.105839 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.271263 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srtxw\" (UniqueName: \"kubernetes.io/projected/065b63c2-611d-4461-bc50-4cfb9e120bba-kube-api-access-srtxw\") pod \"065b63c2-611d-4461-bc50-4cfb9e120bba\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.271479 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-combined-ca-bundle\") pod \"065b63c2-611d-4461-bc50-4cfb9e120bba\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.271755 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/065b63c2-611d-4461-bc50-4cfb9e120bba-logs\") pod \"065b63c2-611d-4461-bc50-4cfb9e120bba\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.271949 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-config-data\") pod \"065b63c2-611d-4461-bc50-4cfb9e120bba\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.272066 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-nova-metadata-tls-certs\") pod \"065b63c2-611d-4461-bc50-4cfb9e120bba\" (UID: \"065b63c2-611d-4461-bc50-4cfb9e120bba\") " Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.272549 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/065b63c2-611d-4461-bc50-4cfb9e120bba-logs" (OuterVolumeSpecName: "logs") pod "065b63c2-611d-4461-bc50-4cfb9e120bba" (UID: "065b63c2-611d-4461-bc50-4cfb9e120bba"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.285294 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/065b63c2-611d-4461-bc50-4cfb9e120bba-kube-api-access-srtxw" (OuterVolumeSpecName: "kube-api-access-srtxw") pod "065b63c2-611d-4461-bc50-4cfb9e120bba" (UID: "065b63c2-611d-4461-bc50-4cfb9e120bba"). InnerVolumeSpecName "kube-api-access-srtxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.289399 4693 generic.go:334] "Generic (PLEG): container finished" podID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerID="b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff" exitCode=0 Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.289473 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.289746 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"065b63c2-611d-4461-bc50-4cfb9e120bba","Type":"ContainerDied","Data":"b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff"} Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.289893 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"065b63c2-611d-4461-bc50-4cfb9e120bba","Type":"ContainerDied","Data":"e971e8efc62f1b956e5813ab7d124060abca0d828ed64e03c7cac2dd5815f020"} Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.290027 4693 scope.go:117] "RemoveContainer" containerID="b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.301590 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "065b63c2-611d-4461-bc50-4cfb9e120bba" (UID: "065b63c2-611d-4461-bc50-4cfb9e120bba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.322987 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-config-data" (OuterVolumeSpecName: "config-data") pod "065b63c2-611d-4461-bc50-4cfb9e120bba" (UID: "065b63c2-611d-4461-bc50-4cfb9e120bba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.349470 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "065b63c2-611d-4461-bc50-4cfb9e120bba" (UID: "065b63c2-611d-4461-bc50-4cfb9e120bba"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.374620 4693 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/065b63c2-611d-4461-bc50-4cfb9e120bba-logs\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.374645 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.374657 4693 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.374666 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srtxw\" (UniqueName: \"kubernetes.io/projected/065b63c2-611d-4461-bc50-4cfb9e120bba-kube-api-access-srtxw\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.374674 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/065b63c2-611d-4461-bc50-4cfb9e120bba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.392422 4693 scope.go:117] "RemoveContainer" containerID="fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.414944 4693 scope.go:117] "RemoveContainer" containerID="b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff" Oct 08 07:36:37 crc kubenswrapper[4693]: E1008 07:36:37.415289 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff\": container with ID starting with b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff not found: ID does not exist" containerID="b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.415325 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff"} err="failed to get container status \"b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff\": rpc error: code = NotFound desc = could not find container \"b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff\": container with ID starting with b950b5dbd41755f767ce40162cf5b8d27a6e8ca910b770e5ca5e8e84825e0dff not found: ID does not exist" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.415350 4693 scope.go:117] "RemoveContainer" containerID="fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63" Oct 08 07:36:37 crc kubenswrapper[4693]: E1008 07:36:37.415576 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63\": container with ID starting with fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63 not found: ID does not exist" containerID="fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.415599 4693 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63"} err="failed to get container status \"fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63\": rpc error: code = NotFound desc = could not find container \"fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63\": container with ID starting with fa1cafcf02fa231f92aa844a6c11a252c8b6599a132832ac6985037fc30abf63 not found: ID does not exist" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.614731 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.623458 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.635207 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:36:37 crc kubenswrapper[4693]: E1008 07:36:37.636249 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerName="nova-metadata-metadata" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.636355 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerName="nova-metadata-metadata" Oct 08 07:36:37 crc kubenswrapper[4693]: E1008 07:36:37.636462 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerName="nova-metadata-log" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.636560 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerName="nova-metadata-log" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.637015 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerName="nova-metadata-log" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.637115 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" containerName="nova-metadata-metadata" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.638806 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.646453 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.646578 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.661497 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.783188 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22aa81a6-83fc-4751-aa3b-c77361db77c0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.783538 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhvsx\" (UniqueName: \"kubernetes.io/projected/22aa81a6-83fc-4751-aa3b-c77361db77c0-kube-api-access-vhvsx\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.783633 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22aa81a6-83fc-4751-aa3b-c77361db77c0-logs\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.783864 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22aa81a6-83fc-4751-aa3b-c77361db77c0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.783941 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22aa81a6-83fc-4751-aa3b-c77361db77c0-config-data\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.885755 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22aa81a6-83fc-4751-aa3b-c77361db77c0-logs\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.885882 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22aa81a6-83fc-4751-aa3b-c77361db77c0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0" Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.885920 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22aa81a6-83fc-4751-aa3b-c77361db77c0-config-data\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0" Oct 08 
Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.886085 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhvsx\" (UniqueName: \"kubernetes.io/projected/22aa81a6-83fc-4751-aa3b-c77361db77c0-kube-api-access-vhvsx\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0"
Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.886209 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22aa81a6-83fc-4751-aa3b-c77361db77c0-logs\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0"
Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.890123 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22aa81a6-83fc-4751-aa3b-c77361db77c0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0"
Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.893384 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22aa81a6-83fc-4751-aa3b-c77361db77c0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0"
Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.903451 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22aa81a6-83fc-4751-aa3b-c77361db77c0-config-data\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0"
Oct 08 07:36:37 crc kubenswrapper[4693]: I1008 07:36:37.911364 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhvsx\" (UniqueName: \"kubernetes.io/projected/22aa81a6-83fc-4751-aa3b-c77361db77c0-kube-api-access-vhvsx\") pod \"nova-metadata-0\" (UID: \"22aa81a6-83fc-4751-aa3b-c77361db77c0\") " pod="openstack/nova-metadata-0"
Oct 08 07:36:38 crc kubenswrapper[4693]: I1008 07:36:38.007481 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 08 07:36:38 crc kubenswrapper[4693]: I1008 07:36:38.262467 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 08 07:36:38 crc kubenswrapper[4693]: W1008 07:36:38.267272 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22aa81a6_83fc_4751_aa3b_c77361db77c0.slice/crio-60eee0b1e0c9f499aa958dc5c54712a908798a99c3d238d92e737b0db7b08498 WatchSource:0}: Error finding container 60eee0b1e0c9f499aa958dc5c54712a908798a99c3d238d92e737b0db7b08498: Status 404 returned error can't find the container with id 60eee0b1e0c9f499aa958dc5c54712a908798a99c3d238d92e737b0db7b08498
Oct 08 07:36:38 crc kubenswrapper[4693]: I1008 07:36:38.301206 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22aa81a6-83fc-4751-aa3b-c77361db77c0","Type":"ContainerStarted","Data":"60eee0b1e0c9f499aa958dc5c54712a908798a99c3d238d92e737b0db7b08498"}
Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.118351 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.223978 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9e121a7-a4c1-4813-a55f-c0579fa72459-config-data\") pod \"c9e121a7-a4c1-4813-a55f-c0579fa72459\" (UID: \"c9e121a7-a4c1-4813-a55f-c0579fa72459\") "
Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.224121 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9e121a7-a4c1-4813-a55f-c0579fa72459-combined-ca-bundle\") pod \"c9e121a7-a4c1-4813-a55f-c0579fa72459\" (UID: \"c9e121a7-a4c1-4813-a55f-c0579fa72459\") "
Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.224177 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cknxw\" (UniqueName: \"kubernetes.io/projected/c9e121a7-a4c1-4813-a55f-c0579fa72459-kube-api-access-cknxw\") pod \"c9e121a7-a4c1-4813-a55f-c0579fa72459\" (UID: \"c9e121a7-a4c1-4813-a55f-c0579fa72459\") "
Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.232082 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9e121a7-a4c1-4813-a55f-c0579fa72459-kube-api-access-cknxw" (OuterVolumeSpecName: "kube-api-access-cknxw") pod "c9e121a7-a4c1-4813-a55f-c0579fa72459" (UID: "c9e121a7-a4c1-4813-a55f-c0579fa72459"). InnerVolumeSpecName "kube-api-access-cknxw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.251684 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9e121a7-a4c1-4813-a55f-c0579fa72459-config-data" (OuterVolumeSpecName: "config-data") pod "c9e121a7-a4c1-4813-a55f-c0579fa72459" (UID: "c9e121a7-a4c1-4813-a55f-c0579fa72459"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.253572 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9e121a7-a4c1-4813-a55f-c0579fa72459-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9e121a7-a4c1-4813-a55f-c0579fa72459" (UID: "c9e121a7-a4c1-4813-a55f-c0579fa72459"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.313870 4693 generic.go:334] "Generic (PLEG): container finished" podID="c9e121a7-a4c1-4813-a55f-c0579fa72459" containerID="8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6" exitCode=0 Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.313925 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c9e121a7-a4c1-4813-a55f-c0579fa72459","Type":"ContainerDied","Data":"8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6"} Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.313991 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c9e121a7-a4c1-4813-a55f-c0579fa72459","Type":"ContainerDied","Data":"40f092bbb099c96163ed72df0af1d29748c37b7811a41bdccfe2deda9b23b9c6"} Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.314021 4693 scope.go:117] "RemoveContainer" containerID="8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.313944 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.318666 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22aa81a6-83fc-4751-aa3b-c77361db77c0","Type":"ContainerStarted","Data":"b9337b852fb02c9e848840c6013c64473366287bc105ded20d838a2f0ade4808"} Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.318762 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22aa81a6-83fc-4751-aa3b-c77361db77c0","Type":"ContainerStarted","Data":"174063d220719285a52631ba4fc820a7cad9c65a9ff41d6a778732affea212a3"} Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.327002 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9e121a7-a4c1-4813-a55f-c0579fa72459-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.327045 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9e121a7-a4c1-4813-a55f-c0579fa72459-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.327060 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cknxw\" (UniqueName: \"kubernetes.io/projected/c9e121a7-a4c1-4813-a55f-c0579fa72459-kube-api-access-cknxw\") on node \"crc\" DevicePath \"\"" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.339458 4693 scope.go:117] "RemoveContainer" containerID="8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6" Oct 08 07:36:39 crc kubenswrapper[4693]: E1008 07:36:39.339971 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6\": container with ID starting with 8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6 not found: ID does not exist" containerID="8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.340036 4693 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6"} err="failed to get container status \"8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6\": rpc error: code = NotFound desc = could not find container \"8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6\": container with ID starting with 8f3aaaa01cc6350a4abdda16a410967dcf6571a855417e4c74f923b91aaba7a6 not found: ID does not exist" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.342030 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.342014475 podStartE2EDuration="2.342014475s" podCreationTimestamp="2025-10-08 07:36:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:36:39.337842149 +0000 UTC m=+1184.708807094" watchObservedRunningTime="2025-10-08 07:36:39.342014475 +0000 UTC m=+1184.712979410" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.386917 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="065b63c2-611d-4461-bc50-4cfb9e120bba" path="/var/lib/kubelet/pods/065b63c2-611d-4461-bc50-4cfb9e120bba/volumes" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.387753 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.422366 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.422435 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:36:39 crc kubenswrapper[4693]: E1008 07:36:39.423623 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9e121a7-a4c1-4813-a55f-c0579fa72459" containerName="nova-scheduler-scheduler" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.423666 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9e121a7-a4c1-4813-a55f-c0579fa72459" containerName="nova-scheduler-scheduler" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.424368 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9e121a7-a4c1-4813-a55f-c0579fa72459" containerName="nova-scheduler-scheduler" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.425597 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.428918 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.452796 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.532638 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fcb903f-8f89-47a5-b120-a3e8daaaa2ae-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4fcb903f-8f89-47a5-b120-a3e8daaaa2ae\") " pod="openstack/nova-scheduler-0" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.532966 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8s9vq\" (UniqueName: \"kubernetes.io/projected/4fcb903f-8f89-47a5-b120-a3e8daaaa2ae-kube-api-access-8s9vq\") pod \"nova-scheduler-0\" (UID: \"4fcb903f-8f89-47a5-b120-a3e8daaaa2ae\") " pod="openstack/nova-scheduler-0" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.532990 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fcb903f-8f89-47a5-b120-a3e8daaaa2ae-config-data\") pod \"nova-scheduler-0\" (UID: \"4fcb903f-8f89-47a5-b120-a3e8daaaa2ae\") " pod="openstack/nova-scheduler-0" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.635306 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fcb903f-8f89-47a5-b120-a3e8daaaa2ae-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4fcb903f-8f89-47a5-b120-a3e8daaaa2ae\") " pod="openstack/nova-scheduler-0" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.635433 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8s9vq\" (UniqueName: \"kubernetes.io/projected/4fcb903f-8f89-47a5-b120-a3e8daaaa2ae-kube-api-access-8s9vq\") pod \"nova-scheduler-0\" (UID: \"4fcb903f-8f89-47a5-b120-a3e8daaaa2ae\") " pod="openstack/nova-scheduler-0" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.635457 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fcb903f-8f89-47a5-b120-a3e8daaaa2ae-config-data\") pod \"nova-scheduler-0\" (UID: \"4fcb903f-8f89-47a5-b120-a3e8daaaa2ae\") " pod="openstack/nova-scheduler-0" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.640180 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fcb903f-8f89-47a5-b120-a3e8daaaa2ae-config-data\") pod \"nova-scheduler-0\" (UID: \"4fcb903f-8f89-47a5-b120-a3e8daaaa2ae\") " pod="openstack/nova-scheduler-0" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.640943 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fcb903f-8f89-47a5-b120-a3e8daaaa2ae-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4fcb903f-8f89-47a5-b120-a3e8daaaa2ae\") " pod="openstack/nova-scheduler-0" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.654751 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8s9vq\" (UniqueName: 
\"kubernetes.io/projected/4fcb903f-8f89-47a5-b120-a3e8daaaa2ae-kube-api-access-8s9vq\") pod \"nova-scheduler-0\" (UID: \"4fcb903f-8f89-47a5-b120-a3e8daaaa2ae\") " pod="openstack/nova-scheduler-0" Oct 08 07:36:39 crc kubenswrapper[4693]: I1008 07:36:39.756699 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 08 07:36:40 crc kubenswrapper[4693]: I1008 07:36:40.186121 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 08 07:36:40 crc kubenswrapper[4693]: I1008 07:36:40.330634 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4fcb903f-8f89-47a5-b120-a3e8daaaa2ae","Type":"ContainerStarted","Data":"bf3dbbe18515da7ad4fe7813dca005e7d2253061eb26821f5c306f693e673449"} Oct 08 07:36:41 crc kubenswrapper[4693]: I1008 07:36:41.343559 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4fcb903f-8f89-47a5-b120-a3e8daaaa2ae","Type":"ContainerStarted","Data":"aa32b400f005d1c84beccb1832744fe628fd0c21ce49da2037e44c785422fe85"} Oct 08 07:36:41 crc kubenswrapper[4693]: I1008 07:36:41.363150 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.3631307599999998 podStartE2EDuration="2.36313076s" podCreationTimestamp="2025-10-08 07:36:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:36:41.362881034 +0000 UTC m=+1186.733845979" watchObservedRunningTime="2025-10-08 07:36:41.36313076 +0000 UTC m=+1186.734095705" Oct 08 07:36:41 crc kubenswrapper[4693]: I1008 07:36:41.383198 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9e121a7-a4c1-4813-a55f-c0579fa72459" path="/var/lib/kubelet/pods/c9e121a7-a4c1-4813-a55f-c0579fa72459/volumes" Oct 08 07:36:43 crc kubenswrapper[4693]: I1008 07:36:43.008322 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 08 07:36:43 crc kubenswrapper[4693]: I1008 07:36:43.009588 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 08 07:36:44 crc kubenswrapper[4693]: I1008 07:36:44.757419 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 08 07:36:44 crc kubenswrapper[4693]: I1008 07:36:44.949288 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 08 07:36:44 crc kubenswrapper[4693]: I1008 07:36:44.949347 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 08 07:36:46 crc kubenswrapper[4693]: I1008 07:36:46.000022 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="106dc2ce-316f-4e4e-a87c-ada5021fea4b" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 08 07:36:46 crc kubenswrapper[4693]: I1008 07:36:46.000030 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="106dc2ce-316f-4e4e-a87c-ada5021fea4b" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 08 07:36:48 crc kubenswrapper[4693]: I1008 07:36:48.008684 
4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 08 07:36:48 crc kubenswrapper[4693]: I1008 07:36:48.009180 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 08 07:36:49 crc kubenswrapper[4693]: I1008 07:36:49.021953 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="22aa81a6-83fc-4751-aa3b-c77361db77c0" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 08 07:36:49 crc kubenswrapper[4693]: I1008 07:36:49.021973 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="22aa81a6-83fc-4751-aa3b-c77361db77c0" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 08 07:36:49 crc kubenswrapper[4693]: I1008 07:36:49.757643 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 08 07:36:49 crc kubenswrapper[4693]: I1008 07:36:49.787294 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 08 07:36:50 crc kubenswrapper[4693]: I1008 07:36:50.494709 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 08 07:36:51 crc kubenswrapper[4693]: I1008 07:36:51.753379 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 08 07:36:54 crc kubenswrapper[4693]: I1008 07:36:54.959694 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 08 07:36:54 crc kubenswrapper[4693]: I1008 07:36:54.960729 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 08 07:36:54 crc kubenswrapper[4693]: I1008 07:36:54.961717 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 08 07:36:54 crc kubenswrapper[4693]: I1008 07:36:54.975541 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 08 07:36:55 crc kubenswrapper[4693]: I1008 07:36:55.507383 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 08 07:36:55 crc kubenswrapper[4693]: I1008 07:36:55.517288 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 08 07:36:58 crc kubenswrapper[4693]: I1008 07:36:58.023125 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 08 07:36:58 crc kubenswrapper[4693]: I1008 07:36:58.026419 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 08 07:36:58 crc kubenswrapper[4693]: I1008 07:36:58.030294 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 08 07:36:58 crc kubenswrapper[4693]: I1008 07:36:58.557881 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 08 07:37:06 crc kubenswrapper[4693]: I1008 07:37:06.714060 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 08 07:37:07 crc kubenswrapper[4693]: 
I1008 07:37:07.481054 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 08 07:37:10 crc kubenswrapper[4693]: I1008 07:37:10.689901 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="344b4125-6848-4985-b722-8e9e589b1ab4" containerName="rabbitmq" containerID="cri-o://0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53" gracePeriod=604797 Oct 08 07:37:11 crc kubenswrapper[4693]: I1008 07:37:11.144632 4693 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="344b4125-6848-4985-b722-8e9e589b1ab4" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Oct 08 07:37:11 crc kubenswrapper[4693]: I1008 07:37:11.999845 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="cc3c1ad2-7355-4db4-af71-27c3454a025c" containerName="rabbitmq" containerID="cri-o://5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545" gracePeriod=604796 Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.351516 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.527612 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"344b4125-6848-4985-b722-8e9e589b1ab4\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.527681 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldmwx\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-kube-api-access-ldmwx\") pod \"344b4125-6848-4985-b722-8e9e589b1ab4\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.527710 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-erlang-cookie\") pod \"344b4125-6848-4985-b722-8e9e589b1ab4\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.527745 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/344b4125-6848-4985-b722-8e9e589b1ab4-pod-info\") pod \"344b4125-6848-4985-b722-8e9e589b1ab4\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.527783 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-plugins\") pod \"344b4125-6848-4985-b722-8e9e589b1ab4\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.527843 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-confd\") pod \"344b4125-6848-4985-b722-8e9e589b1ab4\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.527924 4693 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/344b4125-6848-4985-b722-8e9e589b1ab4-erlang-cookie-secret\") pod \"344b4125-6848-4985-b722-8e9e589b1ab4\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.527942 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-config-data\") pod \"344b4125-6848-4985-b722-8e9e589b1ab4\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.527962 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-server-conf\") pod \"344b4125-6848-4985-b722-8e9e589b1ab4\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.528035 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-plugins-conf\") pod \"344b4125-6848-4985-b722-8e9e589b1ab4\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.528051 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-tls\") pod \"344b4125-6848-4985-b722-8e9e589b1ab4\" (UID: \"344b4125-6848-4985-b722-8e9e589b1ab4\") " Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.535279 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-kube-api-access-ldmwx" (OuterVolumeSpecName: "kube-api-access-ldmwx") pod "344b4125-6848-4985-b722-8e9e589b1ab4" (UID: "344b4125-6848-4985-b722-8e9e589b1ab4"). InnerVolumeSpecName "kube-api-access-ldmwx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.537853 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "344b4125-6848-4985-b722-8e9e589b1ab4" (UID: "344b4125-6848-4985-b722-8e9e589b1ab4"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.538693 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/344b4125-6848-4985-b722-8e9e589b1ab4-pod-info" (OuterVolumeSpecName: "pod-info") pod "344b4125-6848-4985-b722-8e9e589b1ab4" (UID: "344b4125-6848-4985-b722-8e9e589b1ab4"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.539186 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "344b4125-6848-4985-b722-8e9e589b1ab4" (UID: "344b4125-6848-4985-b722-8e9e589b1ab4"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.539780 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "344b4125-6848-4985-b722-8e9e589b1ab4" (UID: "344b4125-6848-4985-b722-8e9e589b1ab4"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.557805 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "344b4125-6848-4985-b722-8e9e589b1ab4" (UID: "344b4125-6848-4985-b722-8e9e589b1ab4"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.558914 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/344b4125-6848-4985-b722-8e9e589b1ab4-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "344b4125-6848-4985-b722-8e9e589b1ab4" (UID: "344b4125-6848-4985-b722-8e9e589b1ab4"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.567312 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-config-data" (OuterVolumeSpecName: "config-data") pod "344b4125-6848-4985-b722-8e9e589b1ab4" (UID: "344b4125-6848-4985-b722-8e9e589b1ab4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.574468 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "344b4125-6848-4985-b722-8e9e589b1ab4" (UID: "344b4125-6848-4985-b722-8e9e589b1ab4"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.611336 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-server-conf" (OuterVolumeSpecName: "server-conf") pod "344b4125-6848-4985-b722-8e9e589b1ab4" (UID: "344b4125-6848-4985-b722-8e9e589b1ab4"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.630545 4693 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.630582 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldmwx\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-kube-api-access-ldmwx\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.630596 4693 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.630604 4693 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/344b4125-6848-4985-b722-8e9e589b1ab4-pod-info\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.630681 4693 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.630690 4693 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/344b4125-6848-4985-b722-8e9e589b1ab4-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.630718 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.630728 4693 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-server-conf\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.633195 4693 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/344b4125-6848-4985-b722-8e9e589b1ab4-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.633215 4693 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.653133 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "344b4125-6848-4985-b722-8e9e589b1ab4" (UID: "344b4125-6848-4985-b722-8e9e589b1ab4"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.661935 4693 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.736631 4693 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/344b4125-6848-4985-b722-8e9e589b1ab4-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.736701 4693 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.758991 4693 generic.go:334] "Generic (PLEG): container finished" podID="344b4125-6848-4985-b722-8e9e589b1ab4" containerID="0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53" exitCode=0 Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.759047 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"344b4125-6848-4985-b722-8e9e589b1ab4","Type":"ContainerDied","Data":"0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53"} Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.759077 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"344b4125-6848-4985-b722-8e9e589b1ab4","Type":"ContainerDied","Data":"13469022dc950a145e4d8c3dc49a996a13fc12936fa60f1e6de9e9e27bca0f32"} Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.759097 4693 scope.go:117] "RemoveContainer" containerID="0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.759104 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.794695 4693 scope.go:117] "RemoveContainer" containerID="59b5f5fb3f59acdbcb78be6d525d4a9569344861cb9fbcb63563f2eda385b2f5" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.823982 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.836928 4693 scope.go:117] "RemoveContainer" containerID="0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53" Oct 08 07:37:17 crc kubenswrapper[4693]: E1008 07:37:17.838690 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53\": container with ID starting with 0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53 not found: ID does not exist" containerID="0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.838769 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53"} err="failed to get container status \"0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53\": rpc error: code = NotFound desc = could not find container \"0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53\": container with ID starting with 0b380042086b13851d1db0c9f16aba42e574a7a4e45d02dddcf5aa3026a8cd53 not found: ID does not exist" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.838807 4693 scope.go:117] "RemoveContainer" containerID="59b5f5fb3f59acdbcb78be6d525d4a9569344861cb9fbcb63563f2eda385b2f5" Oct 08 07:37:17 crc kubenswrapper[4693]: E1008 07:37:17.839319 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59b5f5fb3f59acdbcb78be6d525d4a9569344861cb9fbcb63563f2eda385b2f5\": container with ID starting with 59b5f5fb3f59acdbcb78be6d525d4a9569344861cb9fbcb63563f2eda385b2f5 not found: ID does not exist" containerID="59b5f5fb3f59acdbcb78be6d525d4a9569344861cb9fbcb63563f2eda385b2f5" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.839369 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59b5f5fb3f59acdbcb78be6d525d4a9569344861cb9fbcb63563f2eda385b2f5"} err="failed to get container status \"59b5f5fb3f59acdbcb78be6d525d4a9569344861cb9fbcb63563f2eda385b2f5\": rpc error: code = NotFound desc = could not find container \"59b5f5fb3f59acdbcb78be6d525d4a9569344861cb9fbcb63563f2eda385b2f5\": container with ID starting with 59b5f5fb3f59acdbcb78be6d525d4a9569344861cb9fbcb63563f2eda385b2f5 not found: ID does not exist" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.851065 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.874928 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 08 07:37:17 crc kubenswrapper[4693]: E1008 07:37:17.875383 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="344b4125-6848-4985-b722-8e9e589b1ab4" containerName="rabbitmq" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.875400 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="344b4125-6848-4985-b722-8e9e589b1ab4" containerName="rabbitmq" Oct 
08 07:37:17 crc kubenswrapper[4693]: E1008 07:37:17.875422 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="344b4125-6848-4985-b722-8e9e589b1ab4" containerName="setup-container" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.875429 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="344b4125-6848-4985-b722-8e9e589b1ab4" containerName="setup-container" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.875609 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="344b4125-6848-4985-b722-8e9e589b1ab4" containerName="rabbitmq" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.876656 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.879439 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.879632 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-rrkgx" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.879777 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.879965 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.880129 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.880245 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.880355 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 08 07:37:17 crc kubenswrapper[4693]: I1008 07:37:17.880994 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.043614 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.044034 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.044069 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-657lq\" (UniqueName: \"kubernetes.io/projected/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-kube-api-access-657lq\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.044105 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-server-conf\") pod \"rabbitmq-server-0\" (UID: 
\"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.044138 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.044187 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.044219 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.044267 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-config-data\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.044319 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.044353 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.044391 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.145969 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.146019 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 
07:37:18.146051 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.146107 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.146149 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.146175 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-657lq\" (UniqueName: \"kubernetes.io/projected/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-kube-api-access-657lq\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.146219 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.146247 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.146290 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.146319 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.146362 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-config-data\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.147125 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-plugins-conf\") pod \"rabbitmq-server-0\" (UID: 
\"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.147654 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.147857 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.148180 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.148376 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.149471 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-config-data\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.150971 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.151107 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.157163 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.164568 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.169416 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-657lq\" (UniqueName: 
\"kubernetes.io/projected/c40b5a1f-c5fc-4885-9816-b7b2cfc98423-kube-api-access-657lq\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.187917 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"c40b5a1f-c5fc-4885-9816-b7b2cfc98423\") " pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.206383 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.613968 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.760736 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-tls\") pod \"cc3c1ad2-7355-4db4-af71-27c3454a025c\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.760869 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-erlang-cookie\") pod \"cc3c1ad2-7355-4db4-af71-27c3454a025c\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.760893 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc3c1ad2-7355-4db4-af71-27c3454a025c-erlang-cookie-secret\") pod \"cc3c1ad2-7355-4db4-af71-27c3454a025c\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.760921 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgk8m\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-kube-api-access-jgk8m\") pod \"cc3c1ad2-7355-4db4-af71-27c3454a025c\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.760963 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-confd\") pod \"cc3c1ad2-7355-4db4-af71-27c3454a025c\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.760994 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-server-conf\") pod \"cc3c1ad2-7355-4db4-af71-27c3454a025c\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.761038 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"cc3c1ad2-7355-4db4-af71-27c3454a025c\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.761090 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-config-data\") pod \"cc3c1ad2-7355-4db4-af71-27c3454a025c\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.761127 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc3c1ad2-7355-4db4-af71-27c3454a025c-pod-info\") pod \"cc3c1ad2-7355-4db4-af71-27c3454a025c\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.761146 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-plugins\") pod \"cc3c1ad2-7355-4db4-af71-27c3454a025c\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.761195 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-plugins-conf\") pod \"cc3c1ad2-7355-4db4-af71-27c3454a025c\" (UID: \"cc3c1ad2-7355-4db4-af71-27c3454a025c\") " Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.762873 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "cc3c1ad2-7355-4db4-af71-27c3454a025c" (UID: "cc3c1ad2-7355-4db4-af71-27c3454a025c"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.770001 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "cc3c1ad2-7355-4db4-af71-27c3454a025c" (UID: "cc3c1ad2-7355-4db4-af71-27c3454a025c"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.770031 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "cc3c1ad2-7355-4db4-af71-27c3454a025c" (UID: "cc3c1ad2-7355-4db4-af71-27c3454a025c"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.770725 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "cc3c1ad2-7355-4db4-af71-27c3454a025c" (UID: "cc3c1ad2-7355-4db4-af71-27c3454a025c"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.770732 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "cc3c1ad2-7355-4db4-af71-27c3454a025c" (UID: "cc3c1ad2-7355-4db4-af71-27c3454a025c"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.771092 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc3c1ad2-7355-4db4-af71-27c3454a025c-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "cc3c1ad2-7355-4db4-af71-27c3454a025c" (UID: "cc3c1ad2-7355-4db4-af71-27c3454a025c"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.771411 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/cc3c1ad2-7355-4db4-af71-27c3454a025c-pod-info" (OuterVolumeSpecName: "pod-info") pod "cc3c1ad2-7355-4db4-af71-27c3454a025c" (UID: "cc3c1ad2-7355-4db4-af71-27c3454a025c"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.773196 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-kube-api-access-jgk8m" (OuterVolumeSpecName: "kube-api-access-jgk8m") pod "cc3c1ad2-7355-4db4-af71-27c3454a025c" (UID: "cc3c1ad2-7355-4db4-af71-27c3454a025c"). InnerVolumeSpecName "kube-api-access-jgk8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.791012 4693 generic.go:334] "Generic (PLEG): container finished" podID="cc3c1ad2-7355-4db4-af71-27c3454a025c" containerID="5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545" exitCode=0 Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.791071 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cc3c1ad2-7355-4db4-af71-27c3454a025c","Type":"ContainerDied","Data":"5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545"} Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.791098 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cc3c1ad2-7355-4db4-af71-27c3454a025c","Type":"ContainerDied","Data":"62bbeea0cfe67b31b756797f380232d1cc991b442a5b9a18190cff44b372edce"} Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.791114 4693 scope.go:117] "RemoveContainer" containerID="5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.791227 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.797644 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 08 07:37:18 crc kubenswrapper[4693]: W1008 07:37:18.803292 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc40b5a1f_c5fc_4885_9816_b7b2cfc98423.slice/crio-20eaeee8d7b26504a782476a9142ffdc531caea962a5c5c75b00cf03043fd541 WatchSource:0}: Error finding container 20eaeee8d7b26504a782476a9142ffdc531caea962a5c5c75b00cf03043fd541: Status 404 returned error can't find the container with id 20eaeee8d7b26504a782476a9142ffdc531caea962a5c5c75b00cf03043fd541 Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.818565 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-config-data" (OuterVolumeSpecName: "config-data") pod "cc3c1ad2-7355-4db4-af71-27c3454a025c" (UID: "cc3c1ad2-7355-4db4-af71-27c3454a025c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.822647 4693 scope.go:117] "RemoveContainer" containerID="1bba1bea243da621ea1b6179ad5db08abecc8d2aa48bc3f641980d1431dcd318" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.825953 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-server-conf" (OuterVolumeSpecName: "server-conf") pod "cc3c1ad2-7355-4db4-af71-27c3454a025c" (UID: "cc3c1ad2-7355-4db4-af71-27c3454a025c"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.850051 4693 scope.go:117] "RemoveContainer" containerID="5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545" Oct 08 07:37:18 crc kubenswrapper[4693]: E1008 07:37:18.850419 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545\": container with ID starting with 5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545 not found: ID does not exist" containerID="5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.850594 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545"} err="failed to get container status \"5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545\": rpc error: code = NotFound desc = could not find container \"5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545\": container with ID starting with 5b9ae188c9c7a09d4da4cf074043c5806d749edf48accbeaacfe0738b87c4545 not found: ID does not exist" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.850727 4693 scope.go:117] "RemoveContainer" containerID="1bba1bea243da621ea1b6179ad5db08abecc8d2aa48bc3f641980d1431dcd318" Oct 08 07:37:18 crc kubenswrapper[4693]: E1008 07:37:18.852549 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bba1bea243da621ea1b6179ad5db08abecc8d2aa48bc3f641980d1431dcd318\": container with ID starting with 
1bba1bea243da621ea1b6179ad5db08abecc8d2aa48bc3f641980d1431dcd318 not found: ID does not exist" containerID="1bba1bea243da621ea1b6179ad5db08abecc8d2aa48bc3f641980d1431dcd318" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.852596 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bba1bea243da621ea1b6179ad5db08abecc8d2aa48bc3f641980d1431dcd318"} err="failed to get container status \"1bba1bea243da621ea1b6179ad5db08abecc8d2aa48bc3f641980d1431dcd318\": rpc error: code = NotFound desc = could not find container \"1bba1bea243da621ea1b6179ad5db08abecc8d2aa48bc3f641980d1431dcd318\": container with ID starting with 1bba1bea243da621ea1b6179ad5db08abecc8d2aa48bc3f641980d1431dcd318 not found: ID does not exist" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.863171 4693 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.863349 4693 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.863427 4693 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc3c1ad2-7355-4db4-af71-27c3454a025c-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.863527 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgk8m\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-kube-api-access-jgk8m\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.863604 4693 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-server-conf\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.863706 4693 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.863782 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.863959 4693 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc3c1ad2-7355-4db4-af71-27c3454a025c-pod-info\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.864038 4693 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.864195 4693 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc3c1ad2-7355-4db4-af71-27c3454a025c-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.886200 4693 operation_generator.go:917] 
UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.912704 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "cc3c1ad2-7355-4db4-af71-27c3454a025c" (UID: "cc3c1ad2-7355-4db4-af71-27c3454a025c"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.966142 4693 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc3c1ad2-7355-4db4-af71-27c3454a025c-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Oct 08 07:37:18 crc kubenswrapper[4693]: I1008 07:37:18.966170 4693 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.129867 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.139384 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.152325 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 08 07:37:19 crc kubenswrapper[4693]: E1008 07:37:19.152680 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc3c1ad2-7355-4db4-af71-27c3454a025c" containerName="rabbitmq"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.152695 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc3c1ad2-7355-4db4-af71-27c3454a025c" containerName="rabbitmq"
Oct 08 07:37:19 crc kubenswrapper[4693]: E1008 07:37:19.152713 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc3c1ad2-7355-4db4-af71-27c3454a025c" containerName="setup-container"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.152719 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc3c1ad2-7355-4db4-af71-27c3454a025c" containerName="setup-container"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.152920 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc3c1ad2-7355-4db4-af71-27c3454a025c" containerName="rabbitmq"
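Note the sequence just above: the StatefulSet pod is deleted and re-added under the same name but a new UID (cc3c1ad2-7355-4db4-af71-27c3454a025c gives way to 1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a below), so the CPU and memory managers first discard the old pod's stale state, and the kubelet's reflectors then re-list and watch the Secrets and ConfigMaps the new pod mounts ("Caches populated for *v1.Secret/*v1.ConfigMap"). Those reflector.go lines come from client-go's informer machinery; as a rough illustration only (the wiring below is generic client-go usage, not the kubelet's own), the same LIST+WATCH cache can be driven from any client:

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Build a client from the local kubeconfig (illustrative; the kubelet
	// authenticates with its own node credentials instead).
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// A reflector-backed informer on Secrets in the "openstack" namespace,
	// the mechanism behind the "Caches populated for *v1.Secret" lines.
	factory := informers.NewSharedInformerFactoryWithOptions(
		cs, 30*time.Second, informers.WithNamespace("openstack"))
	secrets := factory.Core().V1().Secrets().Informer()
	secrets.AddEventHandler(cache.ResourceEventHandlerFuncs{
		AddFunc: func(obj interface{}) {
			fmt.Println("cache add:", obj.(*corev1.Secret).Name)
		},
	})

	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()
	factory.Start(ctx.Done())
	cache.WaitForCacheSync(ctx.Done(), secrets.HasSynced) // i.e. "Caches populated"
}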
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.153855 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.156250 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dk6wq"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.156409 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.156479 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.156876 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.156893 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.160060 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.162048 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.177353 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.271431 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.271531 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.271587 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.271672 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.271726 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.271778 4693 reconciler_common.go:245]
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.271912 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.272128 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qjdf\" (UniqueName: \"kubernetes.io/projected/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-kube-api-access-7qjdf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.272198 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.272300 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.272632 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.374350 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.374418 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.374459 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.374492 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.374546 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.374572 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.374620 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.374678 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.374773 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qjdf\" (UniqueName: \"kubernetes.io/projected/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-kube-api-access-7qjdf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.374799 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.374848 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.376010 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.377023 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="344b4125-6848-4985-b722-8e9e589b1ab4" path="/var/lib/kubelet/pods/344b4125-6848-4985-b722-8e9e589b1ab4/volumes" Oct 08 
07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.378500 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.378529 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.378704 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.378757 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.379888 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.380019 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc3c1ad2-7355-4db4-af71-27c3454a025c" path="/var/lib/kubelet/pods/cc3c1ad2-7355-4db4-af71-27c3454a025c/volumes" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.383514 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.384770 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.385295 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.390634 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " 
pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.397236 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qjdf\" (UniqueName: \"kubernetes.io/projected/1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a-kube-api-access-7qjdf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.411144 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a\") " pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.474060 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.804185 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c40b5a1f-c5fc-4885-9816-b7b2cfc98423","Type":"ContainerStarted","Data":"20eaeee8d7b26504a782476a9142ffdc531caea962a5c5c75b00cf03043fd541"} Oct 08 07:37:19 crc kubenswrapper[4693]: I1008 07:37:19.963401 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 08 07:37:19 crc kubenswrapper[4693]: W1008 07:37:19.977595 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1cb1ac5f_3a31_47eb_8ed2_99d0934d2e8a.slice/crio-54c94f32348cc5908d8f915623aeb3d24584e80bb25a320685cbace4f4b441cc WatchSource:0}: Error finding container 54c94f32348cc5908d8f915623aeb3d24584e80bb25a320685cbace4f4b441cc: Status 404 returned error can't find the container with id 54c94f32348cc5908d8f915623aeb3d24584e80bb25a320685cbace4f4b441cc Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.498849 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-4g8fd"] Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.500797 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.503125 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.528752 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-4g8fd"] Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.597636 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-config\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.597680 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.597760 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.597801 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.597862 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.598000 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.598132 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q88n\" (UniqueName: \"kubernetes.io/projected/4f01f14f-2a3d-422e-982c-68e0faaa9229-kube-api-access-2q88n\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.700120 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-openstack-edpm-ipam\") pod 
\"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.700189 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.700245 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.700282 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.700328 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q88n\" (UniqueName: \"kubernetes.io/projected/4f01f14f-2a3d-422e-982c-68e0faaa9229-kube-api-access-2q88n\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.700399 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-config\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.700419 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.701233 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.701264 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.701291 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: 
\"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.701600 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.701755 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-config\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.701838 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.719668 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q88n\" (UniqueName: \"kubernetes.io/projected/4f01f14f-2a3d-422e-982c-68e0faaa9229-kube-api-access-2q88n\") pod \"dnsmasq-dns-79bd4cc8c9-4g8fd\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.816334 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c40b5a1f-c5fc-4885-9816-b7b2cfc98423","Type":"ContainerStarted","Data":"0a67fe839a5bf99724abc15f58b786acddf6d4da40b75f6c2a12458a44736b64"} Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.818085 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a","Type":"ContainerStarted","Data":"54c94f32348cc5908d8f915623aeb3d24584e80bb25a320685cbace4f4b441cc"} Oct 08 07:37:20 crc kubenswrapper[4693]: I1008 07:37:20.832617 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:21 crc kubenswrapper[4693]: I1008 07:37:21.107988 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-4g8fd"] Oct 08 07:37:21 crc kubenswrapper[4693]: W1008 07:37:21.110581 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f01f14f_2a3d_422e_982c_68e0faaa9229.slice/crio-23db70ba6b7cfab474c6b6b42edd33b775afa9e761147f4b0aeb675ac4a673d1 WatchSource:0}: Error finding container 23db70ba6b7cfab474c6b6b42edd33b775afa9e761147f4b0aeb675ac4a673d1: Status 404 returned error can't find the container with id 23db70ba6b7cfab474c6b6b42edd33b775afa9e761147f4b0aeb675ac4a673d1 Oct 08 07:37:21 crc kubenswrapper[4693]: I1008 07:37:21.843965 4693 generic.go:334] "Generic (PLEG): container finished" podID="4f01f14f-2a3d-422e-982c-68e0faaa9229" containerID="d3cc33832c28f40815ee4da526a7d039e3ac138dc8b80817de4452a700e92711" exitCode=0 Oct 08 07:37:21 crc kubenswrapper[4693]: I1008 07:37:21.844211 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" event={"ID":"4f01f14f-2a3d-422e-982c-68e0faaa9229","Type":"ContainerDied","Data":"d3cc33832c28f40815ee4da526a7d039e3ac138dc8b80817de4452a700e92711"} Oct 08 07:37:21 crc kubenswrapper[4693]: I1008 07:37:21.844544 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" event={"ID":"4f01f14f-2a3d-422e-982c-68e0faaa9229","Type":"ContainerStarted","Data":"23db70ba6b7cfab474c6b6b42edd33b775afa9e761147f4b0aeb675ac4a673d1"} Oct 08 07:37:21 crc kubenswrapper[4693]: I1008 07:37:21.849919 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a","Type":"ContainerStarted","Data":"35872852c57162e89de2cd329dba38a10f00b1a538f596fafceef1de4849a315"} Oct 08 07:37:22 crc kubenswrapper[4693]: I1008 07:37:22.859409 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" event={"ID":"4f01f14f-2a3d-422e-982c-68e0faaa9229","Type":"ContainerStarted","Data":"294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a"} Oct 08 07:37:22 crc kubenswrapper[4693]: I1008 07:37:22.895672 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" podStartSLOduration=2.895654772 podStartE2EDuration="2.895654772s" podCreationTimestamp="2025-10-08 07:37:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:37:22.886842308 +0000 UTC m=+1228.257807243" watchObservedRunningTime="2025-10-08 07:37:22.895654772 +0000 UTC m=+1228.266619707" Oct 08 07:37:23 crc kubenswrapper[4693]: I1008 07:37:23.489904 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:37:23 crc kubenswrapper[4693]: I1008 07:37:23.490010 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Oct 08 07:37:23 crc kubenswrapper[4693]: I1008 07:37:23.868198 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:30 crc kubenswrapper[4693]: I1008 07:37:30.834047 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:30 crc kubenswrapper[4693]: I1008 07:37:30.888847 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-r9btl"] Oct 08 07:37:30 crc kubenswrapper[4693]: I1008 07:37:30.889392 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl" podUID="d423401e-dd93-4c22-a0f4-1af916d772a5" containerName="dnsmasq-dns" containerID="cri-o://d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f" gracePeriod=10 Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.061960 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55478c4467-2nfsx"] Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.063469 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.081161 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55478c4467-2nfsx"] Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.125697 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-ovsdbserver-sb\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.125778 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-dns-svc\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.125838 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdfcj\" (UniqueName: \"kubernetes.io/projected/77ca5c51-5f17-4793-897e-235a54c041c2-kube-api-access-wdfcj\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.126183 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-config\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.126237 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-openstack-edpm-ipam\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.126308 4693 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-dns-swift-storage-0\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.126521 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-ovsdbserver-nb\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.229021 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-dns-svc\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.229108 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdfcj\" (UniqueName: \"kubernetes.io/projected/77ca5c51-5f17-4793-897e-235a54c041c2-kube-api-access-wdfcj\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.229142 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-config\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.229178 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-openstack-edpm-ipam\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.229206 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-dns-swift-storage-0\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.229266 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-ovsdbserver-nb\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.229320 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-ovsdbserver-sb\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.230314 4693 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-ovsdbserver-sb\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.230398 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-dns-svc\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.230514 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-openstack-edpm-ipam\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.230948 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-dns-swift-storage-0\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.231025 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-config\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.231302 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77ca5c51-5f17-4793-897e-235a54c041c2-ovsdbserver-nb\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.262158 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdfcj\" (UniqueName: \"kubernetes.io/projected/77ca5c51-5f17-4793-897e-235a54c041c2-kube-api-access-wdfcj\") pod \"dnsmasq-dns-55478c4467-2nfsx\" (UID: \"77ca5c51-5f17-4793-897e-235a54c041c2\") " pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.352822 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.382342 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.434292 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-ovsdbserver-nb\") pod \"d423401e-dd93-4c22-a0f4-1af916d772a5\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.434356 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-dns-svc\") pod \"d423401e-dd93-4c22-a0f4-1af916d772a5\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.434530 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttcxc\" (UniqueName: \"kubernetes.io/projected/d423401e-dd93-4c22-a0f4-1af916d772a5-kube-api-access-ttcxc\") pod \"d423401e-dd93-4c22-a0f4-1af916d772a5\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.434569 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-config\") pod \"d423401e-dd93-4c22-a0f4-1af916d772a5\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.434628 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-ovsdbserver-sb\") pod \"d423401e-dd93-4c22-a0f4-1af916d772a5\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.434650 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-dns-swift-storage-0\") pod \"d423401e-dd93-4c22-a0f4-1af916d772a5\" (UID: \"d423401e-dd93-4c22-a0f4-1af916d772a5\") " Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.445574 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d423401e-dd93-4c22-a0f4-1af916d772a5-kube-api-access-ttcxc" (OuterVolumeSpecName: "kube-api-access-ttcxc") pod "d423401e-dd93-4c22-a0f4-1af916d772a5" (UID: "d423401e-dd93-4c22-a0f4-1af916d772a5"). InnerVolumeSpecName "kube-api-access-ttcxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.494135 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d423401e-dd93-4c22-a0f4-1af916d772a5" (UID: "d423401e-dd93-4c22-a0f4-1af916d772a5"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.499553 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-config" (OuterVolumeSpecName: "config") pod "d423401e-dd93-4c22-a0f4-1af916d772a5" (UID: "d423401e-dd93-4c22-a0f4-1af916d772a5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.520028 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d423401e-dd93-4c22-a0f4-1af916d772a5" (UID: "d423401e-dd93-4c22-a0f4-1af916d772a5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.521281 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d423401e-dd93-4c22-a0f4-1af916d772a5" (UID: "d423401e-dd93-4c22-a0f4-1af916d772a5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.527287 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d423401e-dd93-4c22-a0f4-1af916d772a5" (UID: "d423401e-dd93-4c22-a0f4-1af916d772a5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.537066 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttcxc\" (UniqueName: \"kubernetes.io/projected/d423401e-dd93-4c22-a0f4-1af916d772a5-kube-api-access-ttcxc\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.537094 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.537105 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.537113 4693 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.537121 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.537129 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d423401e-dd93-4c22-a0f4-1af916d772a5-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.880461 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55478c4467-2nfsx"] Oct 08 07:37:31 crc kubenswrapper[4693]: W1008 07:37:31.884553 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77ca5c51_5f17_4793_897e_235a54c041c2.slice/crio-3d0b1bf463fcdb388c06e1b8c81b85c27739d1a949e927df9471c811fb3854ab WatchSource:0}: Error finding container 3d0b1bf463fcdb388c06e1b8c81b85c27739d1a949e927df9471c811fb3854ab: Status 404 
returned error can't find the container with id 3d0b1bf463fcdb388c06e1b8c81b85c27739d1a949e927df9471c811fb3854ab Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.968800 4693 generic.go:334] "Generic (PLEG): container finished" podID="d423401e-dd93-4c22-a0f4-1af916d772a5" containerID="d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f" exitCode=0 Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.968868 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl" event={"ID":"d423401e-dd93-4c22-a0f4-1af916d772a5","Type":"ContainerDied","Data":"d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f"} Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.968926 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl" event={"ID":"d423401e-dd93-4c22-a0f4-1af916d772a5","Type":"ContainerDied","Data":"6228df7d5147e9425aa617bdb3c1c05758ed17075159123971246436617665e4"} Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.968945 4693 scope.go:117] "RemoveContainer" containerID="d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.968940 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-r9btl" Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.969694 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55478c4467-2nfsx" event={"ID":"77ca5c51-5f17-4793-897e-235a54c041c2","Type":"ContainerStarted","Data":"3d0b1bf463fcdb388c06e1b8c81b85c27739d1a949e927df9471c811fb3854ab"} Oct 08 07:37:31 crc kubenswrapper[4693]: I1008 07:37:31.992565 4693 scope.go:117] "RemoveContainer" containerID="5e4ace75a34a01b5fad416889ab8a034d411c6db784164b3c165b50d8ad84a5e" Oct 08 07:37:32 crc kubenswrapper[4693]: I1008 07:37:32.009353 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-r9btl"] Oct 08 07:37:32 crc kubenswrapper[4693]: I1008 07:37:32.016903 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-r9btl"] Oct 08 07:37:32 crc kubenswrapper[4693]: I1008 07:37:32.082659 4693 scope.go:117] "RemoveContainer" containerID="d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f" Oct 08 07:37:32 crc kubenswrapper[4693]: E1008 07:37:32.083045 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f\": container with ID starting with d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f not found: ID does not exist" containerID="d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f" Oct 08 07:37:32 crc kubenswrapper[4693]: I1008 07:37:32.083073 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f"} err="failed to get container status \"d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f\": rpc error: code = NotFound desc = could not find container \"d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f\": container with ID starting with d942f8973c8c8b443484efc661f64ec7226535f070cdef83abb0a7a509ec243f not found: ID does not exist" Oct 08 07:37:32 crc kubenswrapper[4693]: I1008 07:37:32.083126 4693 scope.go:117] "RemoveContainer" 
containerID="5e4ace75a34a01b5fad416889ab8a034d411c6db784164b3c165b50d8ad84a5e" Oct 08 07:37:32 crc kubenswrapper[4693]: E1008 07:37:32.083487 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e4ace75a34a01b5fad416889ab8a034d411c6db784164b3c165b50d8ad84a5e\": container with ID starting with 5e4ace75a34a01b5fad416889ab8a034d411c6db784164b3c165b50d8ad84a5e not found: ID does not exist" containerID="5e4ace75a34a01b5fad416889ab8a034d411c6db784164b3c165b50d8ad84a5e" Oct 08 07:37:32 crc kubenswrapper[4693]: I1008 07:37:32.083526 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e4ace75a34a01b5fad416889ab8a034d411c6db784164b3c165b50d8ad84a5e"} err="failed to get container status \"5e4ace75a34a01b5fad416889ab8a034d411c6db784164b3c165b50d8ad84a5e\": rpc error: code = NotFound desc = could not find container \"5e4ace75a34a01b5fad416889ab8a034d411c6db784164b3c165b50d8ad84a5e\": container with ID starting with 5e4ace75a34a01b5fad416889ab8a034d411c6db784164b3c165b50d8ad84a5e not found: ID does not exist" Oct 08 07:37:32 crc kubenswrapper[4693]: I1008 07:37:32.987277 4693 generic.go:334] "Generic (PLEG): container finished" podID="77ca5c51-5f17-4793-897e-235a54c041c2" containerID="c8be32f682ce984bb5ecf9813b4bf069485a45da52ccf71bb763436f94447a24" exitCode=0 Oct 08 07:37:32 crc kubenswrapper[4693]: I1008 07:37:32.987339 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55478c4467-2nfsx" event={"ID":"77ca5c51-5f17-4793-897e-235a54c041c2","Type":"ContainerDied","Data":"c8be32f682ce984bb5ecf9813b4bf069485a45da52ccf71bb763436f94447a24"} Oct 08 07:37:33 crc kubenswrapper[4693]: I1008 07:37:33.376686 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d423401e-dd93-4c22-a0f4-1af916d772a5" path="/var/lib/kubelet/pods/d423401e-dd93-4c22-a0f4-1af916d772a5/volumes" Oct 08 07:37:34 crc kubenswrapper[4693]: I1008 07:37:34.004519 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55478c4467-2nfsx" event={"ID":"77ca5c51-5f17-4793-897e-235a54c041c2","Type":"ContainerStarted","Data":"eaaacf77a76c019363fc03b4c890ebc6dc307afe4230172ddce2821d94cdfdc4"} Oct 08 07:37:34 crc kubenswrapper[4693]: I1008 07:37:34.004896 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:34 crc kubenswrapper[4693]: I1008 07:37:34.027859 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55478c4467-2nfsx" podStartSLOduration=3.027809078 podStartE2EDuration="3.027809078s" podCreationTimestamp="2025-10-08 07:37:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:37:34.02513211 +0000 UTC m=+1239.396097085" watchObservedRunningTime="2025-10-08 07:37:34.027809078 +0000 UTC m=+1239.398774043" Oct 08 07:37:41 crc kubenswrapper[4693]: I1008 07:37:41.390059 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55478c4467-2nfsx" Oct 08 07:37:41 crc kubenswrapper[4693]: I1008 07:37:41.467419 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-4g8fd"] Oct 08 07:37:41 crc kubenswrapper[4693]: I1008 07:37:41.467706 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" 
podUID="4f01f14f-2a3d-422e-982c-68e0faaa9229" containerName="dnsmasq-dns" containerID="cri-o://294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a" gracePeriod=10 Oct 08 07:37:41 crc kubenswrapper[4693]: I1008 07:37:41.964946 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.054616 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-dns-swift-storage-0\") pod \"4f01f14f-2a3d-422e-982c-68e0faaa9229\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.055320 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-ovsdbserver-sb\") pod \"4f01f14f-2a3d-422e-982c-68e0faaa9229\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.055469 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-openstack-edpm-ipam\") pod \"4f01f14f-2a3d-422e-982c-68e0faaa9229\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.055629 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2q88n\" (UniqueName: \"kubernetes.io/projected/4f01f14f-2a3d-422e-982c-68e0faaa9229-kube-api-access-2q88n\") pod \"4f01f14f-2a3d-422e-982c-68e0faaa9229\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.055759 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-config\") pod \"4f01f14f-2a3d-422e-982c-68e0faaa9229\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.055896 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-ovsdbserver-nb\") pod \"4f01f14f-2a3d-422e-982c-68e0faaa9229\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.055994 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-dns-svc\") pod \"4f01f14f-2a3d-422e-982c-68e0faaa9229\" (UID: \"4f01f14f-2a3d-422e-982c-68e0faaa9229\") " Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.061256 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f01f14f-2a3d-422e-982c-68e0faaa9229-kube-api-access-2q88n" (OuterVolumeSpecName: "kube-api-access-2q88n") pod "4f01f14f-2a3d-422e-982c-68e0faaa9229" (UID: "4f01f14f-2a3d-422e-982c-68e0faaa9229"). InnerVolumeSpecName "kube-api-access-2q88n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.104181 4693 generic.go:334] "Generic (PLEG): container finished" podID="4f01f14f-2a3d-422e-982c-68e0faaa9229" containerID="294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a" exitCode=0 Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.104225 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" event={"ID":"4f01f14f-2a3d-422e-982c-68e0faaa9229","Type":"ContainerDied","Data":"294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a"} Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.104251 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" event={"ID":"4f01f14f-2a3d-422e-982c-68e0faaa9229","Type":"ContainerDied","Data":"23db70ba6b7cfab474c6b6b42edd33b775afa9e761147f4b0aeb675ac4a673d1"} Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.104267 4693 scope.go:117] "RemoveContainer" containerID="294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.104398 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-4g8fd" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.125253 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4f01f14f-2a3d-422e-982c-68e0faaa9229" (UID: "4f01f14f-2a3d-422e-982c-68e0faaa9229"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.129327 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4f01f14f-2a3d-422e-982c-68e0faaa9229" (UID: "4f01f14f-2a3d-422e-982c-68e0faaa9229"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.137493 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4f01f14f-2a3d-422e-982c-68e0faaa9229" (UID: "4f01f14f-2a3d-422e-982c-68e0faaa9229"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.144316 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4f01f14f-2a3d-422e-982c-68e0faaa9229" (UID: "4f01f14f-2a3d-422e-982c-68e0faaa9229"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.148195 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "4f01f14f-2a3d-422e-982c-68e0faaa9229" (UID: "4f01f14f-2a3d-422e-982c-68e0faaa9229"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.158004 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.158032 4693 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.158042 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2q88n\" (UniqueName: \"kubernetes.io/projected/4f01f14f-2a3d-422e-982c-68e0faaa9229-kube-api-access-2q88n\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.158052 4693 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.158062 4693 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.158071 4693 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.162708 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-config" (OuterVolumeSpecName: "config") pod "4f01f14f-2a3d-422e-982c-68e0faaa9229" (UID: "4f01f14f-2a3d-422e-982c-68e0faaa9229"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.211456 4693 scope.go:117] "RemoveContainer" containerID="d3cc33832c28f40815ee4da526a7d039e3ac138dc8b80817de4452a700e92711" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.241525 4693 scope.go:117] "RemoveContainer" containerID="294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a" Oct 08 07:37:42 crc kubenswrapper[4693]: E1008 07:37:42.242448 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a\": container with ID starting with 294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a not found: ID does not exist" containerID="294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.242517 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a"} err="failed to get container status \"294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a\": rpc error: code = NotFound desc = could not find container \"294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a\": container with ID starting with 294a98d3328d6398c51b1b0558a7894d9d87f0979d755f78418aebc731381e4a not found: ID does not exist" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.242557 4693 scope.go:117] "RemoveContainer" containerID="d3cc33832c28f40815ee4da526a7d039e3ac138dc8b80817de4452a700e92711" Oct 08 07:37:42 crc kubenswrapper[4693]: E1008 07:37:42.243400 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3cc33832c28f40815ee4da526a7d039e3ac138dc8b80817de4452a700e92711\": container with ID starting with d3cc33832c28f40815ee4da526a7d039e3ac138dc8b80817de4452a700e92711 not found: ID does not exist" containerID="d3cc33832c28f40815ee4da526a7d039e3ac138dc8b80817de4452a700e92711" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.243441 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3cc33832c28f40815ee4da526a7d039e3ac138dc8b80817de4452a700e92711"} err="failed to get container status \"d3cc33832c28f40815ee4da526a7d039e3ac138dc8b80817de4452a700e92711\": rpc error: code = NotFound desc = could not find container \"d3cc33832c28f40815ee4da526a7d039e3ac138dc8b80817de4452a700e92711\": container with ID starting with d3cc33832c28f40815ee4da526a7d039e3ac138dc8b80817de4452a700e92711 not found: ID does not exist" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.261726 4693 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f01f14f-2a3d-422e-982c-68e0faaa9229-config\") on node \"crc\" DevicePath \"\"" Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.452893 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-4g8fd"] Oct 08 07:37:42 crc kubenswrapper[4693]: I1008 07:37:42.462736 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-4g8fd"] Oct 08 07:37:43 crc kubenswrapper[4693]: I1008 07:37:43.375071 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f01f14f-2a3d-422e-982c-68e0faaa9229" path="/var/lib/kubelet/pods/4f01f14f-2a3d-422e-982c-68e0faaa9229/volumes" Oct 08 07:37:53 
crc kubenswrapper[4693]: I1008 07:37:53.232249 4693 generic.go:334] "Generic (PLEG): container finished" podID="c40b5a1f-c5fc-4885-9816-b7b2cfc98423" containerID="0a67fe839a5bf99724abc15f58b786acddf6d4da40b75f6c2a12458a44736b64" exitCode=0 Oct 08 07:37:53 crc kubenswrapper[4693]: I1008 07:37:53.232310 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c40b5a1f-c5fc-4885-9816-b7b2cfc98423","Type":"ContainerDied","Data":"0a67fe839a5bf99724abc15f58b786acddf6d4da40b75f6c2a12458a44736b64"} Oct 08 07:37:53 crc kubenswrapper[4693]: I1008 07:37:53.490370 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:37:53 crc kubenswrapper[4693]: I1008 07:37:53.490443 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.252775 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c40b5a1f-c5fc-4885-9816-b7b2cfc98423","Type":"ContainerStarted","Data":"8e9be79578a2d089ad547e83912f4d6c6567f9650e4ea5b91aef6803743fef52"} Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.253496 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.296918 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.296891484 podStartE2EDuration="37.296891484s" podCreationTimestamp="2025-10-08 07:37:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:37:54.290144753 +0000 UTC m=+1259.661109748" watchObservedRunningTime="2025-10-08 07:37:54.296891484 +0000 UTC m=+1259.667856459" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.568571 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv"] Oct 08 07:37:54 crc kubenswrapper[4693]: E1008 07:37:54.569000 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f01f14f-2a3d-422e-982c-68e0faaa9229" containerName="init" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.569020 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f01f14f-2a3d-422e-982c-68e0faaa9229" containerName="init" Oct 08 07:37:54 crc kubenswrapper[4693]: E1008 07:37:54.569038 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d423401e-dd93-4c22-a0f4-1af916d772a5" containerName="dnsmasq-dns" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.569048 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="d423401e-dd93-4c22-a0f4-1af916d772a5" containerName="dnsmasq-dns" Oct 08 07:37:54 crc kubenswrapper[4693]: E1008 07:37:54.569068 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f01f14f-2a3d-422e-982c-68e0faaa9229" containerName="dnsmasq-dns" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.569075 4693 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="4f01f14f-2a3d-422e-982c-68e0faaa9229" containerName="dnsmasq-dns" Oct 08 07:37:54 crc kubenswrapper[4693]: E1008 07:37:54.569091 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d423401e-dd93-4c22-a0f4-1af916d772a5" containerName="init" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.569096 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="d423401e-dd93-4c22-a0f4-1af916d772a5" containerName="init" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.569293 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f01f14f-2a3d-422e-982c-68e0faaa9229" containerName="dnsmasq-dns" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.569331 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="d423401e-dd93-4c22-a0f4-1af916d772a5" containerName="dnsmasq-dns" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.570104 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.573044 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.573415 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.573546 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.573778 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.580052 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv"] Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.738109 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vsz9\" (UniqueName: \"kubernetes.io/projected/2213ef00-9e58-4d62-84f2-026ff39b7127-kube-api-access-9vsz9\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.738198 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.738237 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.738285 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.839950 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vsz9\" (UniqueName: \"kubernetes.io/projected/2213ef00-9e58-4d62-84f2-026ff39b7127-kube-api-access-9vsz9\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.840014 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.840035 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.840063 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.847467 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.849335 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.853167 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.870500 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vsz9\" (UniqueName: 
\"kubernetes.io/projected/2213ef00-9e58-4d62-84f2-026ff39b7127-kube-api-access-9vsz9\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:54 crc kubenswrapper[4693]: I1008 07:37:54.899219 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:37:55 crc kubenswrapper[4693]: I1008 07:37:55.231170 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv"] Oct 08 07:37:55 crc kubenswrapper[4693]: W1008 07:37:55.232203 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2213ef00_9e58_4d62_84f2_026ff39b7127.slice/crio-6fcaf01a5ac509caa1cca285a320a3ec750000ba56e920dc8f8eb242f28becec WatchSource:0}: Error finding container 6fcaf01a5ac509caa1cca285a320a3ec750000ba56e920dc8f8eb242f28becec: Status 404 returned error can't find the container with id 6fcaf01a5ac509caa1cca285a320a3ec750000ba56e920dc8f8eb242f28becec Oct 08 07:37:55 crc kubenswrapper[4693]: I1008 07:37:55.234674 4693 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 08 07:37:55 crc kubenswrapper[4693]: I1008 07:37:55.263503 4693 generic.go:334] "Generic (PLEG): container finished" podID="1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a" containerID="35872852c57162e89de2cd329dba38a10f00b1a538f596fafceef1de4849a315" exitCode=0 Oct 08 07:37:55 crc kubenswrapper[4693]: I1008 07:37:55.263569 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a","Type":"ContainerDied","Data":"35872852c57162e89de2cd329dba38a10f00b1a538f596fafceef1de4849a315"} Oct 08 07:37:55 crc kubenswrapper[4693]: I1008 07:37:55.266708 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" event={"ID":"2213ef00-9e58-4d62-84f2-026ff39b7127","Type":"ContainerStarted","Data":"6fcaf01a5ac509caa1cca285a320a3ec750000ba56e920dc8f8eb242f28becec"} Oct 08 07:37:56 crc kubenswrapper[4693]: I1008 07:37:56.280049 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a","Type":"ContainerStarted","Data":"a2e1e9a0fbbd0b9502a1514b625ebcd193cfb6151fea3a94632eebf8ad26acb6"} Oct 08 07:37:56 crc kubenswrapper[4693]: I1008 07:37:56.280594 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:37:56 crc kubenswrapper[4693]: I1008 07:37:56.307382 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.307357848 podStartE2EDuration="37.307357848s" podCreationTimestamp="2025-10-08 07:37:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:37:56.306485646 +0000 UTC m=+1261.677450591" watchObservedRunningTime="2025-10-08 07:37:56.307357848 +0000 UTC m=+1261.678322783" Oct 08 07:38:03 crc kubenswrapper[4693]: I1008 07:38:03.557539 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:38:04 crc kubenswrapper[4693]: I1008 
07:38:04.352672 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" event={"ID":"2213ef00-9e58-4d62-84f2-026ff39b7127","Type":"ContainerStarted","Data":"1d52fcc24a683d361691b27a110314b17f7b03be2b3ab0f00655b3726a051783"} Oct 08 07:38:04 crc kubenswrapper[4693]: I1008 07:38:04.382070 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" podStartSLOduration=2.061990508 podStartE2EDuration="10.382046082s" podCreationTimestamp="2025-10-08 07:37:54 +0000 UTC" firstStartedPulling="2025-10-08 07:37:55.234464713 +0000 UTC m=+1260.605429648" lastFinishedPulling="2025-10-08 07:38:03.554520267 +0000 UTC m=+1268.925485222" observedRunningTime="2025-10-08 07:38:04.372647953 +0000 UTC m=+1269.743612928" watchObservedRunningTime="2025-10-08 07:38:04.382046082 +0000 UTC m=+1269.753011047" Oct 08 07:38:08 crc kubenswrapper[4693]: I1008 07:38:08.210103 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 08 07:38:09 crc kubenswrapper[4693]: I1008 07:38:09.477964 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 08 07:38:15 crc kubenswrapper[4693]: I1008 07:38:15.458125 4693 generic.go:334] "Generic (PLEG): container finished" podID="2213ef00-9e58-4d62-84f2-026ff39b7127" containerID="1d52fcc24a683d361691b27a110314b17f7b03be2b3ab0f00655b3726a051783" exitCode=0 Oct 08 07:38:15 crc kubenswrapper[4693]: I1008 07:38:15.458228 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" event={"ID":"2213ef00-9e58-4d62-84f2-026ff39b7127","Type":"ContainerDied","Data":"1d52fcc24a683d361691b27a110314b17f7b03be2b3ab0f00655b3726a051783"} Oct 08 07:38:16 crc kubenswrapper[4693]: I1008 07:38:16.938389 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.071794 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vsz9\" (UniqueName: \"kubernetes.io/projected/2213ef00-9e58-4d62-84f2-026ff39b7127-kube-api-access-9vsz9\") pod \"2213ef00-9e58-4d62-84f2-026ff39b7127\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.072177 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-inventory\") pod \"2213ef00-9e58-4d62-84f2-026ff39b7127\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.072247 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-ssh-key\") pod \"2213ef00-9e58-4d62-84f2-026ff39b7127\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.072478 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-repo-setup-combined-ca-bundle\") pod \"2213ef00-9e58-4d62-84f2-026ff39b7127\" (UID: \"2213ef00-9e58-4d62-84f2-026ff39b7127\") " Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.079836 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2213ef00-9e58-4d62-84f2-026ff39b7127-kube-api-access-9vsz9" (OuterVolumeSpecName: "kube-api-access-9vsz9") pod "2213ef00-9e58-4d62-84f2-026ff39b7127" (UID: "2213ef00-9e58-4d62-84f2-026ff39b7127"). InnerVolumeSpecName "kube-api-access-9vsz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.081551 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "2213ef00-9e58-4d62-84f2-026ff39b7127" (UID: "2213ef00-9e58-4d62-84f2-026ff39b7127"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.104915 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2213ef00-9e58-4d62-84f2-026ff39b7127" (UID: "2213ef00-9e58-4d62-84f2-026ff39b7127"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.113198 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-inventory" (OuterVolumeSpecName: "inventory") pod "2213ef00-9e58-4d62-84f2-026ff39b7127" (UID: "2213ef00-9e58-4d62-84f2-026ff39b7127"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.175379 4693 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.175428 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vsz9\" (UniqueName: \"kubernetes.io/projected/2213ef00-9e58-4d62-84f2-026ff39b7127-kube-api-access-9vsz9\") on node \"crc\" DevicePath \"\"" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.175444 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.175456 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2213ef00-9e58-4d62-84f2-026ff39b7127-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.481422 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" event={"ID":"2213ef00-9e58-4d62-84f2-026ff39b7127","Type":"ContainerDied","Data":"6fcaf01a5ac509caa1cca285a320a3ec750000ba56e920dc8f8eb242f28becec"} Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.481468 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6fcaf01a5ac509caa1cca285a320a3ec750000ba56e920dc8f8eb242f28becec" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.481781 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.565022 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn"] Oct 08 07:38:17 crc kubenswrapper[4693]: E1008 07:38:17.565703 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2213ef00-9e58-4d62-84f2-026ff39b7127" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.565736 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="2213ef00-9e58-4d62-84f2-026ff39b7127" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.566244 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="2213ef00-9e58-4d62-84f2-026ff39b7127" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.567436 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.570278 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.570451 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.570574 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.570690 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.577660 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn"] Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.690041 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jslrn\" (UID: \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.690169 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jslrn\" (UID: \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.690196 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2t548\" (UniqueName: \"kubernetes.io/projected/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-kube-api-access-2t548\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jslrn\" (UID: \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.792655 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jslrn\" (UID: \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.792809 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jslrn\" (UID: \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.792884 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2t548\" (UniqueName: \"kubernetes.io/projected/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-kube-api-access-2t548\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jslrn\" (UID: \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.798792 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jslrn\" (UID: \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.798909 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jslrn\" (UID: \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.823868 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2t548\" (UniqueName: \"kubernetes.io/projected/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-kube-api-access-2t548\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jslrn\" (UID: \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:17 crc kubenswrapper[4693]: I1008 07:38:17.895064 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:18 crc kubenswrapper[4693]: I1008 07:38:18.481266 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn"] Oct 08 07:38:19 crc kubenswrapper[4693]: I1008 07:38:19.516221 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" event={"ID":"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c","Type":"ContainerStarted","Data":"c97fc619bad26dd8efcf140df6593c34a151e6a94cd04b38939927d5fd921b20"} Oct 08 07:38:19 crc kubenswrapper[4693]: I1008 07:38:19.517405 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" event={"ID":"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c","Type":"ContainerStarted","Data":"d92c21a53209fa13e5182517b671a8def6b4c311ee3a6dcddf3f515e9d88e086"} Oct 08 07:38:19 crc kubenswrapper[4693]: I1008 07:38:19.541204 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" podStartSLOduration=1.998621583 podStartE2EDuration="2.541183082s" podCreationTimestamp="2025-10-08 07:38:17 +0000 UTC" firstStartedPulling="2025-10-08 07:38:18.489911755 +0000 UTC m=+1283.860876680" lastFinishedPulling="2025-10-08 07:38:19.032473244 +0000 UTC m=+1284.403438179" observedRunningTime="2025-10-08 07:38:19.533769364 +0000 UTC m=+1284.904734389" watchObservedRunningTime="2025-10-08 07:38:19.541183082 +0000 UTC m=+1284.912148027" Oct 08 07:38:22 crc kubenswrapper[4693]: I1008 07:38:22.560363 4693 generic.go:334] "Generic (PLEG): container finished" podID="43bf2dc8-6a52-47ce-978e-9d9fef6ae67c" containerID="c97fc619bad26dd8efcf140df6593c34a151e6a94cd04b38939927d5fd921b20" exitCode=0 Oct 08 07:38:22 crc kubenswrapper[4693]: I1008 07:38:22.560435 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" 
event={"ID":"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c","Type":"ContainerDied","Data":"c97fc619bad26dd8efcf140df6593c34a151e6a94cd04b38939927d5fd921b20"} Oct 08 07:38:23 crc kubenswrapper[4693]: I1008 07:38:23.489537 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:38:23 crc kubenswrapper[4693]: I1008 07:38:23.489611 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:38:23 crc kubenswrapper[4693]: I1008 07:38:23.489663 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:38:23 crc kubenswrapper[4693]: I1008 07:38:23.490594 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"007470b3bab1092250300403efa58dc0217e53cad25ad454a5438806005d0400"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 08 07:38:23 crc kubenswrapper[4693]: I1008 07:38:23.490688 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://007470b3bab1092250300403efa58dc0217e53cad25ad454a5438806005d0400" gracePeriod=600 Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.041012 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.129990 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2t548\" (UniqueName: \"kubernetes.io/projected/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-kube-api-access-2t548\") pod \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\" (UID: \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\") " Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.130100 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-inventory\") pod \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\" (UID: \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\") " Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.130354 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-ssh-key\") pod \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\" (UID: \"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c\") " Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.135860 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-kube-api-access-2t548" (OuterVolumeSpecName: "kube-api-access-2t548") pod "43bf2dc8-6a52-47ce-978e-9d9fef6ae67c" (UID: "43bf2dc8-6a52-47ce-978e-9d9fef6ae67c"). 
InnerVolumeSpecName "kube-api-access-2t548". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.158745 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "43bf2dc8-6a52-47ce-978e-9d9fef6ae67c" (UID: "43bf2dc8-6a52-47ce-978e-9d9fef6ae67c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.161009 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-inventory" (OuterVolumeSpecName: "inventory") pod "43bf2dc8-6a52-47ce-978e-9d9fef6ae67c" (UID: "43bf2dc8-6a52-47ce-978e-9d9fef6ae67c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.232563 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.232597 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2t548\" (UniqueName: \"kubernetes.io/projected/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-kube-api-access-2t548\") on node \"crc\" DevicePath \"\"" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.232608 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43bf2dc8-6a52-47ce-978e-9d9fef6ae67c-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.582716 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="007470b3bab1092250300403efa58dc0217e53cad25ad454a5438806005d0400" exitCode=0 Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.582783 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"007470b3bab1092250300403efa58dc0217e53cad25ad454a5438806005d0400"} Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.583148 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"} Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.583166 4693 scope.go:117] "RemoveContainer" containerID="622f06c0bb6cdaa5465830b91799216c70f2eacc877b5e7e53cedc7bd9a96277" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.585334 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" event={"ID":"43bf2dc8-6a52-47ce-978e-9d9fef6ae67c","Type":"ContainerDied","Data":"d92c21a53209fa13e5182517b671a8def6b4c311ee3a6dcddf3f515e9d88e086"} Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.585377 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d92c21a53209fa13e5182517b671a8def6b4c311ee3a6dcddf3f515e9d88e086" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.585397 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jslrn" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.659242 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz"] Oct 08 07:38:24 crc kubenswrapper[4693]: E1008 07:38:24.659611 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43bf2dc8-6a52-47ce-978e-9d9fef6ae67c" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.659628 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="43bf2dc8-6a52-47ce-978e-9d9fef6ae67c" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.659928 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="43bf2dc8-6a52-47ce-978e-9d9fef6ae67c" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.660568 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.662855 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.663002 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.664062 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.667099 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.676087 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz"] Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.747431 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj96h\" (UniqueName: \"kubernetes.io/projected/ac6cb698-ba08-46e2-a8ae-557f656d3209-kube-api-access-bj96h\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.747539 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.748117 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.748248 4693 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.850233 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj96h\" (UniqueName: \"kubernetes.io/projected/ac6cb698-ba08-46e2-a8ae-557f656d3209-kube-api-access-bj96h\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.850305 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.850412 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.850450 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.855920 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.856218 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.859536 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.870022 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj96h\" (UniqueName: 
\"kubernetes.io/projected/ac6cb698-ba08-46e2-a8ae-557f656d3209-kube-api-access-bj96h\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:24 crc kubenswrapper[4693]: I1008 07:38:24.983004 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:38:26 crc kubenswrapper[4693]: I1008 07:38:26.622112 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz"] Oct 08 07:38:27 crc kubenswrapper[4693]: I1008 07:38:27.628922 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" event={"ID":"ac6cb698-ba08-46e2-a8ae-557f656d3209","Type":"ContainerStarted","Data":"7e113d1ba04227d40cfb4e19e2798bcff50c815718e7abf0fbecc9811d494c1d"} Oct 08 07:38:27 crc kubenswrapper[4693]: I1008 07:38:27.629429 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" event={"ID":"ac6cb698-ba08-46e2-a8ae-557f656d3209","Type":"ContainerStarted","Data":"d5bc0ec4b7f63945eb54281ef639a564f9f3e3ba4534a2d1b9427db0ead04598"} Oct 08 07:38:27 crc kubenswrapper[4693]: I1008 07:38:27.659462 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" podStartSLOduration=3.235997809 podStartE2EDuration="3.659430032s" podCreationTimestamp="2025-10-08 07:38:24 +0000 UTC" firstStartedPulling="2025-10-08 07:38:26.629912717 +0000 UTC m=+1292.000877662" lastFinishedPulling="2025-10-08 07:38:27.05334495 +0000 UTC m=+1292.424309885" observedRunningTime="2025-10-08 07:38:27.652161447 +0000 UTC m=+1293.023126422" watchObservedRunningTime="2025-10-08 07:38:27.659430032 +0000 UTC m=+1293.030395007" Oct 08 07:39:15 crc kubenswrapper[4693]: I1008 07:39:15.060559 4693 scope.go:117] "RemoveContainer" containerID="492a0c81aaf74283b777935b1656d1faba8cac2fd2764d59a1b4d8c28a159cb3" Oct 08 07:40:15 crc kubenswrapper[4693]: I1008 07:40:15.133953 4693 scope.go:117] "RemoveContainer" containerID="fc3c598c4d37e5c2481815949bbebaec782b310b23a8009a7a37554fbc6f2a78" Oct 08 07:40:15 crc kubenswrapper[4693]: I1008 07:40:15.174449 4693 scope.go:117] "RemoveContainer" containerID="e22267284f4fcaa7c0087f73c53744f55292866a67c2c9416e44c2620a6a37dd" Oct 08 07:40:15 crc kubenswrapper[4693]: I1008 07:40:15.215113 4693 scope.go:117] "RemoveContainer" containerID="4b9738f5d8570ba3579097538ce1438277cc9dd0a3519d9dc7f2eb72c010df5c" Oct 08 07:40:23 crc kubenswrapper[4693]: I1008 07:40:23.489631 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:40:23 crc kubenswrapper[4693]: I1008 07:40:23.490277 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.638055 4693 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-marketplace/redhat-operators-6gxfc"] Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.643312 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.660021 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6gxfc"] Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.791426 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q49gq\" (UniqueName: \"kubernetes.io/projected/b4984928-b255-47ab-a9a0-0d9d55ecbf65-kube-api-access-q49gq\") pod \"redhat-operators-6gxfc\" (UID: \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\") " pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.791499 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4984928-b255-47ab-a9a0-0d9d55ecbf65-catalog-content\") pod \"redhat-operators-6gxfc\" (UID: \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\") " pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.791530 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4984928-b255-47ab-a9a0-0d9d55ecbf65-utilities\") pod \"redhat-operators-6gxfc\" (UID: \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\") " pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.893731 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q49gq\" (UniqueName: \"kubernetes.io/projected/b4984928-b255-47ab-a9a0-0d9d55ecbf65-kube-api-access-q49gq\") pod \"redhat-operators-6gxfc\" (UID: \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\") " pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.893854 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4984928-b255-47ab-a9a0-0d9d55ecbf65-catalog-content\") pod \"redhat-operators-6gxfc\" (UID: \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\") " pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.893896 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4984928-b255-47ab-a9a0-0d9d55ecbf65-utilities\") pod \"redhat-operators-6gxfc\" (UID: \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\") " pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.894482 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4984928-b255-47ab-a9a0-0d9d55ecbf65-catalog-content\") pod \"redhat-operators-6gxfc\" (UID: \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\") " pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.894611 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4984928-b255-47ab-a9a0-0d9d55ecbf65-utilities\") pod \"redhat-operators-6gxfc\" (UID: \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\") " 
pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.919770 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q49gq\" (UniqueName: \"kubernetes.io/projected/b4984928-b255-47ab-a9a0-0d9d55ecbf65-kube-api-access-q49gq\") pod \"redhat-operators-6gxfc\" (UID: \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\") " pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:45 crc kubenswrapper[4693]: I1008 07:40:45.979519 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:46 crc kubenswrapper[4693]: I1008 07:40:46.490656 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6gxfc"] Oct 08 07:40:47 crc kubenswrapper[4693]: I1008 07:40:47.243115 4693 generic.go:334] "Generic (PLEG): container finished" podID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" containerID="3e7fe4aa4b3dcd7854372ad753e4d617388d56b198dd88b4fb78327c19957e77" exitCode=0 Oct 08 07:40:47 crc kubenswrapper[4693]: I1008 07:40:47.243432 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6gxfc" event={"ID":"b4984928-b255-47ab-a9a0-0d9d55ecbf65","Type":"ContainerDied","Data":"3e7fe4aa4b3dcd7854372ad753e4d617388d56b198dd88b4fb78327c19957e77"} Oct 08 07:40:47 crc kubenswrapper[4693]: I1008 07:40:47.243471 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6gxfc" event={"ID":"b4984928-b255-47ab-a9a0-0d9d55ecbf65","Type":"ContainerStarted","Data":"aa51eade47f190a2a5e2b3d38bf47e823964ccc4284381250345ec13471229dc"} Oct 08 07:40:48 crc kubenswrapper[4693]: I1008 07:40:48.254043 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6gxfc" event={"ID":"b4984928-b255-47ab-a9a0-0d9d55ecbf65","Type":"ContainerStarted","Data":"50a104df8f3553116ad74b2709907e6957f7fd047d9050c0bb8347231ba1727d"} Oct 08 07:40:50 crc kubenswrapper[4693]: I1008 07:40:50.275998 4693 generic.go:334] "Generic (PLEG): container finished" podID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" containerID="50a104df8f3553116ad74b2709907e6957f7fd047d9050c0bb8347231ba1727d" exitCode=0 Oct 08 07:40:50 crc kubenswrapper[4693]: I1008 07:40:50.276171 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6gxfc" event={"ID":"b4984928-b255-47ab-a9a0-0d9d55ecbf65","Type":"ContainerDied","Data":"50a104df8f3553116ad74b2709907e6957f7fd047d9050c0bb8347231ba1727d"} Oct 08 07:40:51 crc kubenswrapper[4693]: I1008 07:40:51.292282 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6gxfc" event={"ID":"b4984928-b255-47ab-a9a0-0d9d55ecbf65","Type":"ContainerStarted","Data":"586e50884cd4214c9efc5f6daf47d5c2e6e980312d68cf495ba094f8c22596bf"} Oct 08 07:40:51 crc kubenswrapper[4693]: I1008 07:40:51.328776 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6gxfc" podStartSLOduration=2.836048427 podStartE2EDuration="6.328753957s" podCreationTimestamp="2025-10-08 07:40:45 +0000 UTC" firstStartedPulling="2025-10-08 07:40:47.245528032 +0000 UTC m=+1432.616492997" lastFinishedPulling="2025-10-08 07:40:50.738233552 +0000 UTC m=+1436.109198527" observedRunningTime="2025-10-08 07:40:51.31633104 +0000 UTC m=+1436.687295975" watchObservedRunningTime="2025-10-08 07:40:51.328753957 +0000 UTC m=+1436.699718892" Oct 08 
07:40:53 crc kubenswrapper[4693]: I1008 07:40:53.489794 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:40:53 crc kubenswrapper[4693]: I1008 07:40:53.490195 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:40:55 crc kubenswrapper[4693]: I1008 07:40:55.979952 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:55 crc kubenswrapper[4693]: I1008 07:40:55.980566 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:56 crc kubenswrapper[4693]: I1008 07:40:56.030433 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:56 crc kubenswrapper[4693]: I1008 07:40:56.385627 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:56 crc kubenswrapper[4693]: I1008 07:40:56.443642 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6gxfc"] Oct 08 07:40:58 crc kubenswrapper[4693]: I1008 07:40:58.363138 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6gxfc" podUID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" containerName="registry-server" containerID="cri-o://586e50884cd4214c9efc5f6daf47d5c2e6e980312d68cf495ba094f8c22596bf" gracePeriod=2 Oct 08 07:40:58 crc kubenswrapper[4693]: I1008 07:40:58.929411 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.062193 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4984928-b255-47ab-a9a0-0d9d55ecbf65-catalog-content\") pod \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\" (UID: \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\") " Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.062546 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4984928-b255-47ab-a9a0-0d9d55ecbf65-utilities\") pod \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\" (UID: \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\") " Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.062734 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q49gq\" (UniqueName: \"kubernetes.io/projected/b4984928-b255-47ab-a9a0-0d9d55ecbf65-kube-api-access-q49gq\") pod \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\" (UID: \"b4984928-b255-47ab-a9a0-0d9d55ecbf65\") " Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.063383 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4984928-b255-47ab-a9a0-0d9d55ecbf65-utilities" (OuterVolumeSpecName: "utilities") pod "b4984928-b255-47ab-a9a0-0d9d55ecbf65" (UID: "b4984928-b255-47ab-a9a0-0d9d55ecbf65"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.068379 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4984928-b255-47ab-a9a0-0d9d55ecbf65-kube-api-access-q49gq" (OuterVolumeSpecName: "kube-api-access-q49gq") pod "b4984928-b255-47ab-a9a0-0d9d55ecbf65" (UID: "b4984928-b255-47ab-a9a0-0d9d55ecbf65"). InnerVolumeSpecName "kube-api-access-q49gq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.157767 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4984928-b255-47ab-a9a0-0d9d55ecbf65-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4984928-b255-47ab-a9a0-0d9d55ecbf65" (UID: "b4984928-b255-47ab-a9a0-0d9d55ecbf65"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.165203 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q49gq\" (UniqueName: \"kubernetes.io/projected/b4984928-b255-47ab-a9a0-0d9d55ecbf65-kube-api-access-q49gq\") on node \"crc\" DevicePath \"\"" Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.165233 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4984928-b255-47ab-a9a0-0d9d55ecbf65-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.165247 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4984928-b255-47ab-a9a0-0d9d55ecbf65-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.375726 4693 generic.go:334] "Generic (PLEG): container finished" podID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" containerID="586e50884cd4214c9efc5f6daf47d5c2e6e980312d68cf495ba094f8c22596bf" exitCode=0 Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.375848 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6gxfc" Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.381764 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6gxfc" event={"ID":"b4984928-b255-47ab-a9a0-0d9d55ecbf65","Type":"ContainerDied","Data":"586e50884cd4214c9efc5f6daf47d5c2e6e980312d68cf495ba094f8c22596bf"} Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.381849 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6gxfc" event={"ID":"b4984928-b255-47ab-a9a0-0d9d55ecbf65","Type":"ContainerDied","Data":"aa51eade47f190a2a5e2b3d38bf47e823964ccc4284381250345ec13471229dc"} Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.381880 4693 scope.go:117] "RemoveContainer" containerID="586e50884cd4214c9efc5f6daf47d5c2e6e980312d68cf495ba094f8c22596bf" Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.421519 4693 scope.go:117] "RemoveContainer" containerID="50a104df8f3553116ad74b2709907e6957f7fd047d9050c0bb8347231ba1727d" Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.436445 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6gxfc"] Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.446050 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6gxfc"] Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.447093 4693 scope.go:117] "RemoveContainer" containerID="3e7fe4aa4b3dcd7854372ad753e4d617388d56b198dd88b4fb78327c19957e77" Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.497302 4693 scope.go:117] "RemoveContainer" containerID="586e50884cd4214c9efc5f6daf47d5c2e6e980312d68cf495ba094f8c22596bf" Oct 08 07:40:59 crc kubenswrapper[4693]: E1008 07:40:59.497915 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"586e50884cd4214c9efc5f6daf47d5c2e6e980312d68cf495ba094f8c22596bf\": container with ID starting with 586e50884cd4214c9efc5f6daf47d5c2e6e980312d68cf495ba094f8c22596bf not found: ID does not exist" containerID="586e50884cd4214c9efc5f6daf47d5c2e6e980312d68cf495ba094f8c22596bf" Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.497972 4693 
Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.498005 4693 scope.go:117] "RemoveContainer" containerID="50a104df8f3553116ad74b2709907e6957f7fd047d9050c0bb8347231ba1727d"
Oct 08 07:40:59 crc kubenswrapper[4693]: E1008 07:40:59.499708 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50a104df8f3553116ad74b2709907e6957f7fd047d9050c0bb8347231ba1727d\": container with ID starting with 50a104df8f3553116ad74b2709907e6957f7fd047d9050c0bb8347231ba1727d not found: ID does not exist" containerID="50a104df8f3553116ad74b2709907e6957f7fd047d9050c0bb8347231ba1727d"
Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.499738 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50a104df8f3553116ad74b2709907e6957f7fd047d9050c0bb8347231ba1727d"} err="failed to get container status \"50a104df8f3553116ad74b2709907e6957f7fd047d9050c0bb8347231ba1727d\": rpc error: code = NotFound desc = could not find container \"50a104df8f3553116ad74b2709907e6957f7fd047d9050c0bb8347231ba1727d\": container with ID starting with 50a104df8f3553116ad74b2709907e6957f7fd047d9050c0bb8347231ba1727d not found: ID does not exist"
Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.499776 4693 scope.go:117] "RemoveContainer" containerID="3e7fe4aa4b3dcd7854372ad753e4d617388d56b198dd88b4fb78327c19957e77"
Oct 08 07:40:59 crc kubenswrapper[4693]: E1008 07:40:59.501256 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e7fe4aa4b3dcd7854372ad753e4d617388d56b198dd88b4fb78327c19957e77\": container with ID starting with 3e7fe4aa4b3dcd7854372ad753e4d617388d56b198dd88b4fb78327c19957e77 not found: ID does not exist" containerID="3e7fe4aa4b3dcd7854372ad753e4d617388d56b198dd88b4fb78327c19957e77"
Oct 08 07:40:59 crc kubenswrapper[4693]: I1008 07:40:59.501321 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e7fe4aa4b3dcd7854372ad753e4d617388d56b198dd88b4fb78327c19957e77"} err="failed to get container status \"3e7fe4aa4b3dcd7854372ad753e4d617388d56b198dd88b4fb78327c19957e77\": rpc error: code = NotFound desc = could not find container \"3e7fe4aa4b3dcd7854372ad753e4d617388d56b198dd88b4fb78327c19957e77\": container with ID starting with 3e7fe4aa4b3dcd7854372ad753e4d617388d56b198dd88b4fb78327c19957e77 not found: ID does not exist"
Oct 08 07:41:01 crc kubenswrapper[4693]: I1008 07:41:01.386322 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" path="/var/lib/kubelet/pods/b4984928-b255-47ab-a9a0-0d9d55ecbf65/volumes"
Oct 08 07:41:11 crc kubenswrapper[4693]: I1008 07:41:11.834943 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vt599"]
Oct 08 07:41:11 crc kubenswrapper[4693]: E1008 07:41:11.835788 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" containerName="extract-content"
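
The NotFound errors above are benign: each RemoveContainer races with CRI-O's own cleanup, so the follow-up ContainerStatus lookup can legitimately report that the container is already gone, and a missing container already satisfies the goal of deletion. A hedged sketch of that idempotent-delete pattern over a gRPC-style error, using google.golang.org/grpc/status (the helper name is illustrative, not the kubelet's):

  package main

  import (
          "fmt"

          "google.golang.org/grpc/codes"
          "google.golang.org/grpc/status"
  )

  // removeIfPresent treats NotFound as success: the desired end state
  // ("container gone") already holds, so the delete is idempotent.
  func removeIfPresent(remove func(id string) error, id string) error {
          err := remove(id)
          if status.Code(err) == codes.NotFound {
                  fmt.Printf("container %s already gone; treating as removed\n", id)
                  return nil
          }
          return err
  }

  func main() {
          fake := func(id string) error {
                  return status.Error(codes.NotFound, "could not find container "+id)
          }
          fmt.Println(removeIfPresent(fake, "586e5088")) // <nil>
  }
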
podUID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" containerName="extract-content" Oct 08 07:41:11 crc kubenswrapper[4693]: I1008 07:41:11.835801 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" containerName="extract-content" Oct 08 07:41:11 crc kubenswrapper[4693]: E1008 07:41:11.835854 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" containerName="extract-utilities" Oct 08 07:41:11 crc kubenswrapper[4693]: I1008 07:41:11.835861 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" containerName="extract-utilities" Oct 08 07:41:11 crc kubenswrapper[4693]: E1008 07:41:11.835883 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" containerName="registry-server" Oct 08 07:41:11 crc kubenswrapper[4693]: I1008 07:41:11.835889 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" containerName="registry-server" Oct 08 07:41:11 crc kubenswrapper[4693]: I1008 07:41:11.836123 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4984928-b255-47ab-a9a0-0d9d55ecbf65" containerName="registry-server" Oct 08 07:41:11 crc kubenswrapper[4693]: I1008 07:41:11.838317 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:11 crc kubenswrapper[4693]: I1008 07:41:11.852397 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vt599"] Oct 08 07:41:12 crc kubenswrapper[4693]: I1008 07:41:12.039242 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4f1924c-7d6d-4c94-9903-33499a98ffb1-catalog-content\") pod \"community-operators-vt599\" (UID: \"f4f1924c-7d6d-4c94-9903-33499a98ffb1\") " pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:12 crc kubenswrapper[4693]: I1008 07:41:12.039506 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f5rv\" (UniqueName: \"kubernetes.io/projected/f4f1924c-7d6d-4c94-9903-33499a98ffb1-kube-api-access-7f5rv\") pod \"community-operators-vt599\" (UID: \"f4f1924c-7d6d-4c94-9903-33499a98ffb1\") " pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:12 crc kubenswrapper[4693]: I1008 07:41:12.039563 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4f1924c-7d6d-4c94-9903-33499a98ffb1-utilities\") pod \"community-operators-vt599\" (UID: \"f4f1924c-7d6d-4c94-9903-33499a98ffb1\") " pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:12 crc kubenswrapper[4693]: I1008 07:41:12.141545 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4f1924c-7d6d-4c94-9903-33499a98ffb1-utilities\") pod \"community-operators-vt599\" (UID: \"f4f1924c-7d6d-4c94-9903-33499a98ffb1\") " pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:12 crc kubenswrapper[4693]: I1008 07:41:12.141737 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4f1924c-7d6d-4c94-9903-33499a98ffb1-catalog-content\") pod 
\"community-operators-vt599\" (UID: \"f4f1924c-7d6d-4c94-9903-33499a98ffb1\") " pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:12 crc kubenswrapper[4693]: I1008 07:41:12.141768 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f5rv\" (UniqueName: \"kubernetes.io/projected/f4f1924c-7d6d-4c94-9903-33499a98ffb1-kube-api-access-7f5rv\") pod \"community-operators-vt599\" (UID: \"f4f1924c-7d6d-4c94-9903-33499a98ffb1\") " pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:12 crc kubenswrapper[4693]: I1008 07:41:12.142252 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4f1924c-7d6d-4c94-9903-33499a98ffb1-utilities\") pod \"community-operators-vt599\" (UID: \"f4f1924c-7d6d-4c94-9903-33499a98ffb1\") " pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:12 crc kubenswrapper[4693]: I1008 07:41:12.142276 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4f1924c-7d6d-4c94-9903-33499a98ffb1-catalog-content\") pod \"community-operators-vt599\" (UID: \"f4f1924c-7d6d-4c94-9903-33499a98ffb1\") " pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:12 crc kubenswrapper[4693]: I1008 07:41:12.163584 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f5rv\" (UniqueName: \"kubernetes.io/projected/f4f1924c-7d6d-4c94-9903-33499a98ffb1-kube-api-access-7f5rv\") pod \"community-operators-vt599\" (UID: \"f4f1924c-7d6d-4c94-9903-33499a98ffb1\") " pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:12 crc kubenswrapper[4693]: I1008 07:41:12.171550 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:12 crc kubenswrapper[4693]: I1008 07:41:12.682244 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vt599"] Oct 08 07:41:13 crc kubenswrapper[4693]: I1008 07:41:13.539849 4693 generic.go:334] "Generic (PLEG): container finished" podID="f4f1924c-7d6d-4c94-9903-33499a98ffb1" containerID="eb19f18f5fe054369e86fc8540751c61c1529ee4c640511909566fcc5cb1701c" exitCode=0 Oct 08 07:41:13 crc kubenswrapper[4693]: I1008 07:41:13.539931 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vt599" event={"ID":"f4f1924c-7d6d-4c94-9903-33499a98ffb1","Type":"ContainerDied","Data":"eb19f18f5fe054369e86fc8540751c61c1529ee4c640511909566fcc5cb1701c"} Oct 08 07:41:13 crc kubenswrapper[4693]: I1008 07:41:13.540236 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vt599" event={"ID":"f4f1924c-7d6d-4c94-9903-33499a98ffb1","Type":"ContainerStarted","Data":"77756987c18b5657926b106e6aa12da96af67f65409dec53a6122eb3318a8949"} Oct 08 07:41:15 crc kubenswrapper[4693]: I1008 07:41:15.338477 4693 scope.go:117] "RemoveContainer" containerID="dc6f87c7fb37695da0fdb4c4084e634fe8789a8f428290932111fb30cdc853f9" Oct 08 07:41:16 crc kubenswrapper[4693]: I1008 07:41:16.555302 4693 scope.go:117] "RemoveContainer" containerID="415b1473955b634edb41cad4e81485d03606c73826ceb376b76e67c33f0e7a15" Oct 08 07:41:16 crc kubenswrapper[4693]: I1008 07:41:16.630409 4693 scope.go:117] "RemoveContainer" containerID="709ab9aabcedb6747b30d52514a99ddf86bffd135eeeaf22b5581fece090e7b9" Oct 08 07:41:16 crc kubenswrapper[4693]: I1008 07:41:16.675683 4693 scope.go:117] "RemoveContainer" containerID="f63b5dbaf1ad3054df0f25142d88f63f061899da4cb2f4d959c4797c62b97f9d" Oct 08 07:41:17 crc kubenswrapper[4693]: I1008 07:41:17.590969 4693 generic.go:334] "Generic (PLEG): container finished" podID="f4f1924c-7d6d-4c94-9903-33499a98ffb1" containerID="b0b781f8f62fb22491d5ddc922f7298de89d5770ee9d2a9305035783dda7a51d" exitCode=0 Oct 08 07:41:17 crc kubenswrapper[4693]: I1008 07:41:17.591082 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vt599" event={"ID":"f4f1924c-7d6d-4c94-9903-33499a98ffb1","Type":"ContainerDied","Data":"b0b781f8f62fb22491d5ddc922f7298de89d5770ee9d2a9305035783dda7a51d"} Oct 08 07:41:18 crc kubenswrapper[4693]: I1008 07:41:18.604219 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vt599" event={"ID":"f4f1924c-7d6d-4c94-9903-33499a98ffb1","Type":"ContainerStarted","Data":"de4dc01f65c5ce7ccab6a4a8b9123228cb4883e3c7292772543d086d34c3183f"} Oct 08 07:41:18 crc kubenswrapper[4693]: I1008 07:41:18.633590 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vt599" podStartSLOduration=3.110960513 podStartE2EDuration="7.633564002s" podCreationTimestamp="2025-10-08 07:41:11 +0000 UTC" firstStartedPulling="2025-10-08 07:41:13.542556431 +0000 UTC m=+1458.913521366" lastFinishedPulling="2025-10-08 07:41:18.06515992 +0000 UTC m=+1463.436124855" observedRunningTime="2025-10-08 07:41:18.624684815 +0000 UTC m=+1463.995649760" watchObservedRunningTime="2025-10-08 07:41:18.633564002 +0000 UTC m=+1464.004528977" Oct 08 07:41:22 crc kubenswrapper[4693]: I1008 07:41:22.172996 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:22 crc kubenswrapper[4693]: I1008 07:41:22.173441 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:22 crc kubenswrapper[4693]: I1008 07:41:22.245540 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:23 crc kubenswrapper[4693]: I1008 07:41:23.489400 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:41:23 crc kubenswrapper[4693]: I1008 07:41:23.489959 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:41:23 crc kubenswrapper[4693]: I1008 07:41:23.490044 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:41:23 crc kubenswrapper[4693]: I1008 07:41:23.491128 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 08 07:41:23 crc kubenswrapper[4693]: I1008 07:41:23.491255 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" gracePeriod=600 Oct 08 07:41:23 crc kubenswrapper[4693]: E1008 07:41:23.623222 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:41:23 crc kubenswrapper[4693]: I1008 07:41:23.663500 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" exitCode=0 Oct 08 07:41:23 crc kubenswrapper[4693]: I1008 07:41:23.663565 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"} Oct 08 07:41:23 crc kubenswrapper[4693]: I1008 07:41:23.663620 4693 scope.go:117] "RemoveContainer" containerID="007470b3bab1092250300403efa58dc0217e53cad25ad454a5438806005d0400" Oct 08 07:41:23 crc kubenswrapper[4693]: I1008 07:41:23.664893 4693 
scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:41:23 crc kubenswrapper[4693]: E1008 07:41:23.665590 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:41:30 crc kubenswrapper[4693]: I1008 07:41:30.751939 4693 generic.go:334] "Generic (PLEG): container finished" podID="ac6cb698-ba08-46e2-a8ae-557f656d3209" containerID="7e113d1ba04227d40cfb4e19e2798bcff50c815718e7abf0fbecc9811d494c1d" exitCode=0 Oct 08 07:41:30 crc kubenswrapper[4693]: I1008 07:41:30.752018 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" event={"ID":"ac6cb698-ba08-46e2-a8ae-557f656d3209","Type":"ContainerDied","Data":"7e113d1ba04227d40cfb4e19e2798bcff50c815718e7abf0fbecc9811d494c1d"} Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.242401 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vt599" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.250234 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.319017 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vt599"] Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.349069 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5rw5w"] Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.349484 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5rw5w" podUID="1fd608a1-42a8-47e8-97f8-fc387766fae0" containerName="registry-server" containerID="cri-o://a4eb5f55dad5ab14bc0bd5528fbc99b80a63f477a1c1f4fdafcd37484c7fa5f3" gracePeriod=2 Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.425578 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-inventory\") pod \"ac6cb698-ba08-46e2-a8ae-557f656d3209\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.425737 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-ssh-key\") pod \"ac6cb698-ba08-46e2-a8ae-557f656d3209\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.425946 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bj96h\" (UniqueName: \"kubernetes.io/projected/ac6cb698-ba08-46e2-a8ae-557f656d3209-kube-api-access-bj96h\") pod \"ac6cb698-ba08-46e2-a8ae-557f656d3209\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.425986 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-bootstrap-combined-ca-bundle\") pod \"ac6cb698-ba08-46e2-a8ae-557f656d3209\" (UID: \"ac6cb698-ba08-46e2-a8ae-557f656d3209\") " Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.439108 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac6cb698-ba08-46e2-a8ae-557f656d3209-kube-api-access-bj96h" (OuterVolumeSpecName: "kube-api-access-bj96h") pod "ac6cb698-ba08-46e2-a8ae-557f656d3209" (UID: "ac6cb698-ba08-46e2-a8ae-557f656d3209"). InnerVolumeSpecName "kube-api-access-bj96h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.439117 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "ac6cb698-ba08-46e2-a8ae-557f656d3209" (UID: "ac6cb698-ba08-46e2-a8ae-557f656d3209"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:41:32 crc kubenswrapper[4693]: E1008 07:41:32.452572 4693 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1fd608a1_42a8_47e8_97f8_fc387766fae0.slice/crio-a4eb5f55dad5ab14bc0bd5528fbc99b80a63f477a1c1f4fdafcd37484c7fa5f3.scope\": RecentStats: unable to find data in memory cache]" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.477561 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ac6cb698-ba08-46e2-a8ae-557f656d3209" (UID: "ac6cb698-ba08-46e2-a8ae-557f656d3209"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.477732 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-inventory" (OuterVolumeSpecName: "inventory") pod "ac6cb698-ba08-46e2-a8ae-557f656d3209" (UID: "ac6cb698-ba08-46e2-a8ae-557f656d3209"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.529663 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.529897 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bj96h\" (UniqueName: \"kubernetes.io/projected/ac6cb698-ba08-46e2-a8ae-557f656d3209-kube-api-access-bj96h\") on node \"crc\" DevicePath \"\"" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.529909 4693 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.529919 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ac6cb698-ba08-46e2-a8ae-557f656d3209-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.786957 4693 generic.go:334] "Generic (PLEG): container finished" podID="1fd608a1-42a8-47e8-97f8-fc387766fae0" containerID="a4eb5f55dad5ab14bc0bd5528fbc99b80a63f477a1c1f4fdafcd37484c7fa5f3" exitCode=0 Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.787036 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5rw5w" event={"ID":"1fd608a1-42a8-47e8-97f8-fc387766fae0","Type":"ContainerDied","Data":"a4eb5f55dad5ab14bc0bd5528fbc99b80a63f477a1c1f4fdafcd37484c7fa5f3"} Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.787067 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5rw5w" event={"ID":"1fd608a1-42a8-47e8-97f8-fc387766fae0","Type":"ContainerDied","Data":"27300d5858e3e60989a0c39dbd85544e32eb7f524abac887ad65541283d0589d"} Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.787084 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27300d5858e3e60989a0c39dbd85544e32eb7f524abac887ad65541283d0589d" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.789916 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.789954 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz" event={"ID":"ac6cb698-ba08-46e2-a8ae-557f656d3209","Type":"ContainerDied","Data":"d5bc0ec4b7f63945eb54281ef639a564f9f3e3ba4534a2d1b9427db0ead04598"} Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.789999 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5bc0ec4b7f63945eb54281ef639a564f9f3e3ba4534a2d1b9427db0ead04598" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.824969 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.892458 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv"] Oct 08 07:41:32 crc kubenswrapper[4693]: E1008 07:41:32.892824 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd608a1-42a8-47e8-97f8-fc387766fae0" containerName="extract-utilities" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.892838 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd608a1-42a8-47e8-97f8-fc387766fae0" containerName="extract-utilities" Oct 08 07:41:32 crc kubenswrapper[4693]: E1008 07:41:32.892854 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac6cb698-ba08-46e2-a8ae-557f656d3209" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.892862 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac6cb698-ba08-46e2-a8ae-557f656d3209" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 08 07:41:32 crc kubenswrapper[4693]: E1008 07:41:32.892872 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd608a1-42a8-47e8-97f8-fc387766fae0" containerName="extract-content" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.892877 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd608a1-42a8-47e8-97f8-fc387766fae0" containerName="extract-content" Oct 08 07:41:32 crc kubenswrapper[4693]: E1008 07:41:32.892897 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd608a1-42a8-47e8-97f8-fc387766fae0" containerName="registry-server" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.892903 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd608a1-42a8-47e8-97f8-fc387766fae0" containerName="registry-server" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.893086 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fd608a1-42a8-47e8-97f8-fc387766fae0" containerName="registry-server" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.893103 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac6cb698-ba08-46e2-a8ae-557f656d3209" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.893624 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.903036 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.903226 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.903331 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.903451 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.914224 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv"] Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.937406 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fd608a1-42a8-47e8-97f8-fc387766fae0-catalog-content\") pod \"1fd608a1-42a8-47e8-97f8-fc387766fae0\" (UID: \"1fd608a1-42a8-47e8-97f8-fc387766fae0\") " Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.937523 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fd608a1-42a8-47e8-97f8-fc387766fae0-utilities\") pod \"1fd608a1-42a8-47e8-97f8-fc387766fae0\" (UID: \"1fd608a1-42a8-47e8-97f8-fc387766fae0\") " Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.937609 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6q2t\" (UniqueName: \"kubernetes.io/projected/1fd608a1-42a8-47e8-97f8-fc387766fae0-kube-api-access-f6q2t\") pod \"1fd608a1-42a8-47e8-97f8-fc387766fae0\" (UID: \"1fd608a1-42a8-47e8-97f8-fc387766fae0\") " Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.939160 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fd608a1-42a8-47e8-97f8-fc387766fae0-utilities" (OuterVolumeSpecName: "utilities") pod "1fd608a1-42a8-47e8-97f8-fc387766fae0" (UID: "1fd608a1-42a8-47e8-97f8-fc387766fae0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:41:32 crc kubenswrapper[4693]: I1008 07:41:32.944685 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fd608a1-42a8-47e8-97f8-fc387766fae0-kube-api-access-f6q2t" (OuterVolumeSpecName: "kube-api-access-f6q2t") pod "1fd608a1-42a8-47e8-97f8-fc387766fae0" (UID: "1fd608a1-42a8-47e8-97f8-fc387766fae0"). InnerVolumeSpecName "kube-api-access-f6q2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.018163 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fd608a1-42a8-47e8-97f8-fc387766fae0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1fd608a1-42a8-47e8-97f8-fc387766fae0" (UID: "1fd608a1-42a8-47e8-97f8-fc387766fae0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.040022 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9trs\" (UniqueName: \"kubernetes.io/projected/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-kube-api-access-z9trs\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-brvlv\" (UID: \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.040098 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-brvlv\" (UID: \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.040179 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-brvlv\" (UID: \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.040229 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fd608a1-42a8-47e8-97f8-fc387766fae0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.040240 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fd608a1-42a8-47e8-97f8-fc387766fae0-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.040249 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6q2t\" (UniqueName: \"kubernetes.io/projected/1fd608a1-42a8-47e8-97f8-fc387766fae0-kube-api-access-f6q2t\") on node \"crc\" DevicePath \"\"" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.142363 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-brvlv\" (UID: \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.142786 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9trs\" (UniqueName: \"kubernetes.io/projected/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-kube-api-access-z9trs\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-brvlv\" (UID: \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.142922 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-brvlv\" (UID: \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\") " 
pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.146190 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-brvlv\" (UID: \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.151519 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-brvlv\" (UID: \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.172121 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9trs\" (UniqueName: \"kubernetes.io/projected/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-kube-api-access-z9trs\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-brvlv\" (UID: \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.215041 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.792462 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv"] Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.803524 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5rw5w" Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.863616 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5rw5w"] Oct 08 07:41:33 crc kubenswrapper[4693]: I1008 07:41:33.873768 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5rw5w"] Oct 08 07:41:34 crc kubenswrapper[4693]: I1008 07:41:34.820753 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" event={"ID":"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a","Type":"ContainerStarted","Data":"fe72b03bdbc250754ab38a8f08e829a80b263590a38fd4b4fdb33036d1e6b19f"} Oct 08 07:41:34 crc kubenswrapper[4693]: I1008 07:41:34.821071 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" event={"ID":"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a","Type":"ContainerStarted","Data":"4cdb716f17e6c5f83a14c290c00ebfd21086f210a5f5ac6ac0ed2cd9b5dce025"} Oct 08 07:41:34 crc kubenswrapper[4693]: I1008 07:41:34.852625 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" podStartSLOduration=2.157229381 podStartE2EDuration="2.852605757s" podCreationTimestamp="2025-10-08 07:41:32 +0000 UTC" firstStartedPulling="2025-10-08 07:41:33.79612883 +0000 UTC m=+1479.167093775" lastFinishedPulling="2025-10-08 07:41:34.491505176 +0000 UTC m=+1479.862470151" observedRunningTime="2025-10-08 07:41:34.847650901 +0000 UTC m=+1480.218615836" watchObservedRunningTime="2025-10-08 07:41:34.852605757 +0000 UTC m=+1480.223570692" Oct 08 07:41:35 crc kubenswrapper[4693]: I1008 07:41:35.388156 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fd608a1-42a8-47e8-97f8-fc387766fae0" path="/var/lib/kubelet/pods/1fd608a1-42a8-47e8-97f8-fc387766fae0/volumes" Oct 08 07:41:39 crc kubenswrapper[4693]: I1008 07:41:39.362925 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:41:39 crc kubenswrapper[4693]: E1008 07:41:39.363888 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:41:53 crc kubenswrapper[4693]: I1008 07:41:53.363227 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:41:53 crc kubenswrapper[4693]: E1008 07:41:53.364208 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:42:04 crc kubenswrapper[4693]: I1008 07:42:04.364168 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:42:04 
crc kubenswrapper[4693]: E1008 07:42:04.365367 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:42:16 crc kubenswrapper[4693]: I1008 07:42:16.786300 4693 scope.go:117] "RemoveContainer" containerID="2187123fe1df6fb7f983953ce88dfbd095410762cee218bbf0a5d2748c7c5972" Oct 08 07:42:16 crc kubenswrapper[4693]: I1008 07:42:16.830075 4693 scope.go:117] "RemoveContainer" containerID="a4eb5f55dad5ab14bc0bd5528fbc99b80a63f477a1c1f4fdafcd37484c7fa5f3" Oct 08 07:42:16 crc kubenswrapper[4693]: I1008 07:42:16.886629 4693 scope.go:117] "RemoveContainer" containerID="ccec4e693d93421248fb10c020efdee7de15bd4a2c5ea1dff34b9a18c8dc1edf" Oct 08 07:42:16 crc kubenswrapper[4693]: I1008 07:42:16.907695 4693 scope.go:117] "RemoveContainer" containerID="810fb65cfcec786bebcb86c8f20c86dd38990f62002150f52db5e3d2306d442d" Oct 08 07:42:16 crc kubenswrapper[4693]: I1008 07:42:16.947807 4693 scope.go:117] "RemoveContainer" containerID="71b557e2333b8eab5cdd19bd4faf7bf89a9615ef0f68a50784d37723ae794227" Oct 08 07:42:18 crc kubenswrapper[4693]: I1008 07:42:18.363254 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:42:18 crc kubenswrapper[4693]: E1008 07:42:18.363680 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:42:31 crc kubenswrapper[4693]: I1008 07:42:31.362770 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:42:31 crc kubenswrapper[4693]: E1008 07:42:31.363588 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:42:40 crc kubenswrapper[4693]: I1008 07:42:40.076857 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-5p746"] Oct 08 07:42:40 crc kubenswrapper[4693]: I1008 07:42:40.100734 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-nlzwf"] Oct 08 07:42:40 crc kubenswrapper[4693]: I1008 07:42:40.113711 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-zx76x"] Oct 08 07:42:40 crc kubenswrapper[4693]: I1008 07:42:40.123028 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-nlzwf"] Oct 08 07:42:40 crc kubenswrapper[4693]: I1008 07:42:40.133196 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-5p746"] Oct 08 07:42:40 crc 
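
Each of the db-create pods above goes through the same two API transitions: "SyncLoop DELETE" when the object is marked for deletion (deletionTimestamp set, grace period starts) and "SyncLoop REMOVE" once the kubelet finishes teardown and the object is finally purged from the API server. From a client's side the first half is an ordinary graceful delete; a small client-go sketch (the kubeconfig path, namespace, and pod name are placeholders taken from the log):

  package main

  import (
          "context"

          metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
          "k8s.io/client-go/kubernetes"
          "k8s.io/client-go/tools/clientcmd"
  )

  func main() {
          cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // assumed path
          if err != nil {
                  panic(err)
          }
          cs := kubernetes.NewForConfigOrDie(cfg)

          // Graceful delete: sets deletionTimestamp (seen by the kubelet as
          // "SyncLoop DELETE"); the object only disappears ("SyncLoop REMOVE")
          // after cleanup completes and the grace period is honored.
          grace := int64(30)
          err = cs.CoreV1().Pods("openstack").Delete(context.TODO(),
                  "keystone-db-create-5p746",
                  metav1.DeleteOptions{GracePeriodSeconds: &grace})
          if err != nil {
                  panic(err)
          }
  }
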
Oct 08 07:42:41 crc kubenswrapper[4693]: I1008 07:42:41.393422 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b169bf2-af33-4a98-bccc-43de585c357c" path="/var/lib/kubelet/pods/7b169bf2-af33-4a98-bccc-43de585c357c/volumes"
Oct 08 07:42:41 crc kubenswrapper[4693]: I1008 07:42:41.394240 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89ae7404-8482-48a0-8747-1b4feb15d69d" path="/var/lib/kubelet/pods/89ae7404-8482-48a0-8747-1b4feb15d69d/volumes"
Oct 08 07:42:41 crc kubenswrapper[4693]: I1008 07:42:41.396412 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9a1dd78-9d6b-4d47-99c1-4823a2c653c2" path="/var/lib/kubelet/pods/f9a1dd78-9d6b-4d47-99c1-4823a2c653c2/volumes"
Oct 08 07:42:44 crc kubenswrapper[4693]: I1008 07:42:44.362522 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"
Oct 08 07:42:44 crc kubenswrapper[4693]: E1008 07:42:44.363203 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 07:42:50 crc kubenswrapper[4693]: I1008 07:42:50.053276 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-4860-account-create-j9764"]
Oct 08 07:42:50 crc kubenswrapper[4693]: I1008 07:42:50.065740 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-4860-account-create-j9764"]
Oct 08 07:42:51 crc kubenswrapper[4693]: I1008 07:42:51.378788 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ab5a771-9b81-47c8-ab27-83c657676763" path="/var/lib/kubelet/pods/7ab5a771-9b81-47c8-ab27-83c657676763/volumes"
Oct 08 07:42:55 crc kubenswrapper[4693]: I1008 07:42:55.038234 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-5xhnc"]
Oct 08 07:42:55 crc kubenswrapper[4693]: I1008 07:42:55.055086 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-5xhnc"]
Oct 08 07:42:55 crc kubenswrapper[4693]: I1008 07:42:55.063049 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-bg956"]
Oct 08 07:42:55 crc kubenswrapper[4693]: I1008 07:42:55.085889 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-w6cgs"]
Oct 08 07:42:55 crc kubenswrapper[4693]: I1008 07:42:55.100547 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-w6cgs"]
Oct 08 07:42:55 crc kubenswrapper[4693]: I1008 07:42:55.110103 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-bg956"]
Oct 08 07:42:55 crc kubenswrapper[4693]: I1008 07:42:55.380464 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a70d58e-b552-4ff1-a632-47debfe9ffbf" path="/var/lib/kubelet/pods/4a70d58e-b552-4ff1-a632-47debfe9ffbf/volumes"
Oct 08 07:42:55 crc kubenswrapper[4693]: I1008 07:42:55.380999 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e7ed96c-26ab-4895-bcc8-35bba61e5240" path="/var/lib/kubelet/pods/4e7ed96c-26ab-4895-bcc8-35bba61e5240/volumes"
Oct 08 07:42:55 crc kubenswrapper[4693]: I1008 07:42:55.381578 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68d99e15-182a-46b3-8478-d0f2b3763662" path="/var/lib/kubelet/pods/68d99e15-182a-46b3-8478-d0f2b3763662/volumes"
Oct 08 07:42:56 crc kubenswrapper[4693]: I1008 07:42:56.363177 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"
Oct 08 07:42:56 crc kubenswrapper[4693]: E1008 07:42:56.364128 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 07:42:58 crc kubenswrapper[4693]: I1008 07:42:58.030209 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-003f-account-create-s8v4s"]
Oct 08 07:42:58 crc kubenswrapper[4693]: I1008 07:42:58.038147 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-e9f5-account-create-4vwfk"]
Oct 08 07:42:58 crc kubenswrapper[4693]: I1008 07:42:58.044681 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-003f-account-create-s8v4s"]
Oct 08 07:42:58 crc kubenswrapper[4693]: I1008 07:42:58.050615 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-e9f5-account-create-4vwfk"]
Oct 08 07:42:59 crc kubenswrapper[4693]: I1008 07:42:59.383229 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b0a8408-551b-4cbc-996c-9ddc40c25642" path="/var/lib/kubelet/pods/1b0a8408-551b-4cbc-996c-9ddc40c25642/volumes"
Oct 08 07:42:59 crc kubenswrapper[4693]: I1008 07:42:59.387171 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b56ccec5-1b44-4461-8d85-45b35e108b39" path="/var/lib/kubelet/pods/b56ccec5-1b44-4461-8d85-45b35e108b39/volumes"
Oct 08 07:43:09 crc kubenswrapper[4693]: I1008 07:43:09.364223 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"
Oct 08 07:43:09 crc kubenswrapper[4693]: E1008 07:43:09.366946 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 07:43:10 crc kubenswrapper[4693]: I1008 07:43:10.895998 4693 generic.go:334] "Generic (PLEG): container finished" podID="a7a301e7-dfc4-47d6-acf1-f34b19e1e13a" containerID="fe72b03bdbc250754ab38a8f08e829a80b263590a38fd4b4fdb33036d1e6b19f" exitCode=0
Oct 08 07:43:10 crc kubenswrapper[4693]: I1008 07:43:10.896271 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" event={"ID":"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a","Type":"ContainerDied","Data":"fe72b03bdbc250754ab38a8f08e829a80b263590a38fd4b4fdb33036d1e6b19f"}
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.340924 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv"
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.482883 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9trs\" (UniqueName: \"kubernetes.io/projected/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-kube-api-access-z9trs\") pod \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\" (UID: \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\") "
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.483138 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-ssh-key\") pod \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\" (UID: \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\") "
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.483223 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-inventory\") pod \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\" (UID: \"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a\") "
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.490432 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-kube-api-access-z9trs" (OuterVolumeSpecName: "kube-api-access-z9trs") pod "a7a301e7-dfc4-47d6-acf1-f34b19e1e13a" (UID: "a7a301e7-dfc4-47d6-acf1-f34b19e1e13a"). InnerVolumeSpecName "kube-api-access-z9trs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.510925 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-inventory" (OuterVolumeSpecName: "inventory") pod "a7a301e7-dfc4-47d6-acf1-f34b19e1e13a" (UID: "a7a301e7-dfc4-47d6-acf1-f34b19e1e13a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.541776 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a7a301e7-dfc4-47d6-acf1-f34b19e1e13a" (UID: "a7a301e7-dfc4-47d6-acf1-f34b19e1e13a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.587729 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.587782 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-inventory\") on node \"crc\" DevicePath \"\""
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.587804 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9trs\" (UniqueName: \"kubernetes.io/projected/a7a301e7-dfc4-47d6-acf1-f34b19e1e13a-kube-api-access-z9trs\") on node \"crc\" DevicePath \"\""
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.921082 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv" event={"ID":"a7a301e7-dfc4-47d6-acf1-f34b19e1e13a","Type":"ContainerDied","Data":"4cdb716f17e6c5f83a14c290c00ebfd21086f210a5f5ac6ac0ed2cd9b5dce025"}
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.921144 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4cdb716f17e6c5f83a14c290c00ebfd21086f210a5f5ac6ac0ed2cd9b5dce025"
Oct 08 07:43:12 crc kubenswrapper[4693]: I1008 07:43:12.921174 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-brvlv"
Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.054224 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5"]
Oct 08 07:43:13 crc kubenswrapper[4693]: E1008 07:43:13.056784 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7a301e7-dfc4-47d6-acf1-f34b19e1e13a" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.056824 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7a301e7-dfc4-47d6-acf1-f34b19e1e13a" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.061487 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7a301e7-dfc4-47d6-acf1-f34b19e1e13a" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.062573 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5"
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.066621 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.066883 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.067960 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.068114 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.074910 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5"] Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.084867 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-66d0-account-create-nvqcp"] Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.092951 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-6e46-account-create-cd8wf"] Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.099339 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-b816-account-create-vwpn5"] Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.107505 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2faa8ca0-e93e-4532-bf6c-00f2064bf177-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-727b5\" (UID: \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.107590 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zvrp\" (UniqueName: \"kubernetes.io/projected/2faa8ca0-e93e-4532-bf6c-00f2064bf177-kube-api-access-5zvrp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-727b5\" (UID: \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.107765 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2faa8ca0-e93e-4532-bf6c-00f2064bf177-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-727b5\" (UID: \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.108122 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-66d0-account-create-nvqcp"] Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.114778 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-b816-account-create-vwpn5"] Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.121070 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-6e46-account-create-cd8wf"] Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.209478 4693 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2faa8ca0-e93e-4532-bf6c-00f2064bf177-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-727b5\" (UID: \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.209785 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zvrp\" (UniqueName: \"kubernetes.io/projected/2faa8ca0-e93e-4532-bf6c-00f2064bf177-kube-api-access-5zvrp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-727b5\" (UID: \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.209863 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2faa8ca0-e93e-4532-bf6c-00f2064bf177-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-727b5\" (UID: \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.218416 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2faa8ca0-e93e-4532-bf6c-00f2064bf177-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-727b5\" (UID: \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.222354 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2faa8ca0-e93e-4532-bf6c-00f2064bf177-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-727b5\" (UID: \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.228246 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zvrp\" (UniqueName: \"kubernetes.io/projected/2faa8ca0-e93e-4532-bf6c-00f2064bf177-kube-api-access-5zvrp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-727b5\" (UID: \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.381981 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0668b2a6-74b5-4ac0-83f2-de6f7858c66c" path="/var/lib/kubelet/pods/0668b2a6-74b5-4ac0-83f2-de6f7858c66c/volumes" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.383193 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="755f3d62-7b37-470b-94ae-9e9c6a7df2d1" path="/var/lib/kubelet/pods/755f3d62-7b37-470b-94ae-9e9c6a7df2d1/volumes" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.384305 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7a20db2-171f-4856-a3e0-416a6ad7c27c" path="/var/lib/kubelet/pods/a7a20db2-171f-4856-a3e0-416a6ad7c27c/volumes" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.385522 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.781489 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5"] Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.785998 4693 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 08 07:43:13 crc kubenswrapper[4693]: I1008 07:43:13.932256 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" event={"ID":"2faa8ca0-e93e-4532-bf6c-00f2064bf177","Type":"ContainerStarted","Data":"537893e7670e6b425005543a9264d55ae071cadbc704784e94d483721169f1b1"} Oct 08 07:43:14 crc kubenswrapper[4693]: I1008 07:43:14.956423 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" event={"ID":"2faa8ca0-e93e-4532-bf6c-00f2064bf177","Type":"ContainerStarted","Data":"94e95161392947e35b84d93e1691a141115ad43ee39a34786170d6b4ae6c13fe"} Oct 08 07:43:14 crc kubenswrapper[4693]: I1008 07:43:14.985301 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" podStartSLOduration=2.428705434 podStartE2EDuration="2.985279824s" podCreationTimestamp="2025-10-08 07:43:12 +0000 UTC" firstStartedPulling="2025-10-08 07:43:13.785714013 +0000 UTC m=+1579.156678958" lastFinishedPulling="2025-10-08 07:43:14.342288363 +0000 UTC m=+1579.713253348" observedRunningTime="2025-10-08 07:43:14.97932524 +0000 UTC m=+1580.350290185" watchObservedRunningTime="2025-10-08 07:43:14.985279824 +0000 UTC m=+1580.356244769" Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.051074 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-p8hg9"] Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.062906 4693 scope.go:117] "RemoveContainer" containerID="5ff9d3b88c4c586ef50657ee69e1fc6c5d2b9f4a2b646ca4399c5c2378791e44" Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.067267 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-k4zk2"] Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.077993 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-p8hg9"] Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.084224 4693 scope.go:117] "RemoveContainer" containerID="9109a4c49db16e29b7012ed1bc6a522e706bb0b442f27e99d3e18ace6540341f" Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.087853 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-k4zk2"] Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.144174 4693 scope.go:117] "RemoveContainer" containerID="58d65e6065311e9a0ef56636ed6d2a5e0d289209a91bc6b2270cf4eaf6b31f73" Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.184914 4693 scope.go:117] "RemoveContainer" containerID="474fa088465bb1d6f5ca25ec9134f34e5ef950d607d402f4a87e5e3448780638" Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.224125 4693 scope.go:117] "RemoveContainer" containerID="df20617ee487cf96aef76706c8af9bf446bd542042bb9cc2f95eb8bf9b42039e" Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.266117 4693 scope.go:117] "RemoveContainer" containerID="6a3db8abcfcbbf5f365392873d0f2e33bc7484daa8eb91238b763a8e43385000" Oct 08 07:43:17 crc kubenswrapper[4693]: 
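[note] The pod_startup_latency_tracker entry above reports two durations for the same pod: podStartE2EDuration (pod creation to observed running) and podStartSLOduration, which here is the same window minus the image-pull time. The monotonic m=+ offsets in the entry confirm this exactly; a quick check in Python (values copied from the log line, the script itself is purely illustrative):

    # podStartSLOduration = podStartE2EDuration - image pull window,
    # using the m=+ monotonic offsets from the log entry above.
    e2e        = 2.985279824     # podStartE2EDuration (s)
    pull_start = 1579.156678958  # firstStartedPulling, m=+ offset
    pull_end   = 1579.713253348  # lastFinishedPulling, m=+ offset
    print(e2e - (pull_end - pull_start))  # 2.428705434 == podStartSLOduration

The same identity holds for the validate-network and install-os entries further down, so the SLO figure in these logs is effectively startup latency with image pulls excluded.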
Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.313684 4693 scope.go:117] "RemoveContainer" containerID="a3ad3388bf012c4bf6024ea242085f3e5d118d0fab92a49bc08bacd4790fec7f"
Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.337970 4693 scope.go:117] "RemoveContainer" containerID="11b71e180e40027c311985fde67763af5c3c9ae59387c442c1221110f80b31dd"
Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.369066 4693 scope.go:117] "RemoveContainer" containerID="a886b1226d70e20ec5e77d3c402549df1a2160889c6107fdd07ee8e27263ef58"
Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.381713 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15ef4c47-e897-4afe-839d-31e54512a16b" path="/var/lib/kubelet/pods/15ef4c47-e897-4afe-839d-31e54512a16b/volumes"
Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.382484 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4" path="/var/lib/kubelet/pods/c6b00a8e-3a52-43d4-a9d3-1e73b0c78ac4/volumes"
Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.393107 4693 scope.go:117] "RemoveContainer" containerID="5de6b01f998c465874b3020e3f3eb0c67270542cb6a99faa4bb331d2de3a0fee"
Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.412284 4693 scope.go:117] "RemoveContainer" containerID="84c451866e755a53a71e598d616e8c463a11edfeb3a9c23389f1061ff5797574"
Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.433481 4693 scope.go:117] "RemoveContainer" containerID="4588f464f4698364fbc5d20292315c7b643f212a182dea3c6c7d4279f0c7b3b5"
Oct 08 07:43:17 crc kubenswrapper[4693]: I1008 07:43:17.467402 4693 scope.go:117] "RemoveContainer" containerID="e49d78069afa8794b626d0768b44883b7157b83915c4533193528f2f87e0f5ba"
Oct 08 07:43:20 crc kubenswrapper[4693]: I1008 07:43:20.363846 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"
Oct 08 07:43:20 crc kubenswrapper[4693]: E1008 07:43:20.364701 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 07:43:33 crc kubenswrapper[4693]: I1008 07:43:33.366625 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"
Oct 08 07:43:33 crc kubenswrapper[4693]: E1008 07:43:33.367964 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 07:43:46 crc kubenswrapper[4693]: I1008 07:43:46.362835 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"
Oct 08 07:43:46 crc kubenswrapper[4693]: E1008 07:43:46.363558 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 07:43:51 crc kubenswrapper[4693]: I1008 07:43:51.056070 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-trdh5"]
Oct 08 07:43:51 crc kubenswrapper[4693]: I1008 07:43:51.073575 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-trdh5"]
Oct 08 07:43:51 crc kubenswrapper[4693]: I1008 07:43:51.378465 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21be0841-c23b-4e2a-96dd-eebb788a1104" path="/var/lib/kubelet/pods/21be0841-c23b-4e2a-96dd-eebb788a1104/volumes"
Oct 08 07:44:00 crc kubenswrapper[4693]: I1008 07:44:00.046409 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-m4xlg"]
Oct 08 07:44:00 crc kubenswrapper[4693]: I1008 07:44:00.061861 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-m4xlg"]
Oct 08 07:44:00 crc kubenswrapper[4693]: I1008 07:44:00.363398 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"
Oct 08 07:44:00 crc kubenswrapper[4693]: E1008 07:44:00.364468 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 07:44:01 crc kubenswrapper[4693]: I1008 07:44:01.032854 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-rxtmf"]
Oct 08 07:44:01 crc kubenswrapper[4693]: I1008 07:44:01.039229 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7xn4v"]
Oct 08 07:44:01 crc kubenswrapper[4693]: I1008 07:44:01.046268 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7xn4v"]
Oct 08 07:44:01 crc kubenswrapper[4693]: I1008 07:44:01.053752 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-rxtmf"]
Oct 08 07:44:01 crc kubenswrapper[4693]: I1008 07:44:01.382490 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04536603-9f35-460a-b169-a462d38283b9" path="/var/lib/kubelet/pods/04536603-9f35-460a-b169-a462d38283b9/volumes"
Oct 08 07:44:01 crc kubenswrapper[4693]: I1008 07:44:01.384017 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79884c8c-f689-46b7-9223-66bd0b7bff8e" path="/var/lib/kubelet/pods/79884c8c-f689-46b7-9223-66bd0b7bff8e/volumes"
Oct 08 07:44:01 crc kubenswrapper[4693]: I1008 07:44:01.385578 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4d301e9-d078-4876-a6a2-52a7c3b4dcbe" path="/var/lib/kubelet/pods/b4d301e9-d078-4876-a6a2-52a7c3b4dcbe/volumes"
Oct 08 07:44:11 crc kubenswrapper[4693]: I1008 07:44:11.041147 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-r7p2j"]
Oct 08 07:44:11 crc kubenswrapper[4693]: I1008 07:44:11.056384 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-r7p2j"]
Oct 08 07:44:11 crc kubenswrapper[4693]: I1008 07:44:11.363759 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"
Oct 08 07:44:11 crc kubenswrapper[4693]: E1008 07:44:11.364545 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 07:44:11 crc kubenswrapper[4693]: I1008 07:44:11.383394 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1bdcd99-a53d-45ee-b439-57c0e0025fb9" path="/var/lib/kubelet/pods/e1bdcd99-a53d-45ee-b439-57c0e0025fb9/volumes"
Oct 08 07:44:17 crc kubenswrapper[4693]: I1008 07:44:17.748902 4693 scope.go:117] "RemoveContainer" containerID="dc4f68dded1f91f34c7f0e69513d47d93be66115785ca34b370b996156a1dcda"
Oct 08 07:44:17 crc kubenswrapper[4693]: I1008 07:44:17.798416 4693 scope.go:117] "RemoveContainer" containerID="db6f7cd08db4690295e1fa61dc78bec7b1189a82f31b277a7de282b99972fe63"
Oct 08 07:44:17 crc kubenswrapper[4693]: I1008 07:44:17.847312 4693 scope.go:117] "RemoveContainer" containerID="c6093f0600894f583cae174d5ee703161897f41507011bfd222d6d824898c7a1"
Oct 08 07:44:17 crc kubenswrapper[4693]: I1008 07:44:17.887610 4693 scope.go:117] "RemoveContainer" containerID="fbfe4123cc4ff84eda46dc37b6caca960cdd19adbd16833385533ac145451e09"
Oct 08 07:44:17 crc kubenswrapper[4693]: I1008 07:44:17.916255 4693 scope.go:117] "RemoveContainer" containerID="241f808f0059de9604d1f15a065ed76a29e9151236ae2833248db83af252b645"
Oct 08 07:44:17 crc kubenswrapper[4693]: I1008 07:44:17.961841 4693 scope.go:117] "RemoveContainer" containerID="e6d6086133e430d57e92411d11b7b846138ac1be139229c339e52287578db8e0"
Oct 08 07:44:24 crc kubenswrapper[4693]: I1008 07:44:24.363229 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"
Oct 08 07:44:24 crc kubenswrapper[4693]: E1008 07:44:24.364057 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 07:44:32 crc kubenswrapper[4693]: I1008 07:44:32.864765 4693 generic.go:334] "Generic (PLEG): container finished" podID="2faa8ca0-e93e-4532-bf6c-00f2064bf177" containerID="94e95161392947e35b84d93e1691a141115ad43ee39a34786170d6b4ae6c13fe" exitCode=0
Oct 08 07:44:32 crc kubenswrapper[4693]: I1008 07:44:32.864876 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" event={"ID":"2faa8ca0-e93e-4532-bf6c-00f2064bf177","Type":"ContainerDied","Data":"94e95161392947e35b84d93e1691a141115ad43ee39a34786170d6b4ae6c13fe"}
Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.290260 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5"
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.309748 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2faa8ca0-e93e-4532-bf6c-00f2064bf177-inventory\") pod \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\" (UID: \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\") " Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.309920 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2faa8ca0-e93e-4532-bf6c-00f2064bf177-ssh-key\") pod \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\" (UID: \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\") " Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.309964 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zvrp\" (UniqueName: \"kubernetes.io/projected/2faa8ca0-e93e-4532-bf6c-00f2064bf177-kube-api-access-5zvrp\") pod \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\" (UID: \"2faa8ca0-e93e-4532-bf6c-00f2064bf177\") " Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.319098 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2faa8ca0-e93e-4532-bf6c-00f2064bf177-kube-api-access-5zvrp" (OuterVolumeSpecName: "kube-api-access-5zvrp") pod "2faa8ca0-e93e-4532-bf6c-00f2064bf177" (UID: "2faa8ca0-e93e-4532-bf6c-00f2064bf177"). InnerVolumeSpecName "kube-api-access-5zvrp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.355287 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2faa8ca0-e93e-4532-bf6c-00f2064bf177-inventory" (OuterVolumeSpecName: "inventory") pod "2faa8ca0-e93e-4532-bf6c-00f2064bf177" (UID: "2faa8ca0-e93e-4532-bf6c-00f2064bf177"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.357882 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2faa8ca0-e93e-4532-bf6c-00f2064bf177-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2faa8ca0-e93e-4532-bf6c-00f2064bf177" (UID: "2faa8ca0-e93e-4532-bf6c-00f2064bf177"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.412099 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2faa8ca0-e93e-4532-bf6c-00f2064bf177-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.412135 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zvrp\" (UniqueName: \"kubernetes.io/projected/2faa8ca0-e93e-4532-bf6c-00f2064bf177-kube-api-access-5zvrp\") on node \"crc\" DevicePath \"\"" Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.412147 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2faa8ca0-e93e-4532-bf6c-00f2064bf177-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.885532 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" event={"ID":"2faa8ca0-e93e-4532-bf6c-00f2064bf177","Type":"ContainerDied","Data":"537893e7670e6b425005543a9264d55ae071cadbc704784e94d483721169f1b1"} Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.885581 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="537893e7670e6b425005543a9264d55ae071cadbc704784e94d483721169f1b1" Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.885611 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-727b5" Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.997424 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz"] Oct 08 07:44:34 crc kubenswrapper[4693]: E1008 07:44:34.997990 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2faa8ca0-e93e-4532-bf6c-00f2064bf177" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.998014 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="2faa8ca0-e93e-4532-bf6c-00f2064bf177" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.998257 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="2faa8ca0-e93e-4532-bf6c-00f2064bf177" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 08 07:44:34 crc kubenswrapper[4693]: I1008 07:44:34.999133 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.001676 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.002046 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.003780 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.004287 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.008321 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz"] Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.024732 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4k6ns\" (UniqueName: \"kubernetes.io/projected/4b1ce098-43e7-44eb-8416-806097ba000e-kube-api-access-4k6ns\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz\" (UID: \"4b1ce098-43e7-44eb-8416-806097ba000e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.024835 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4b1ce098-43e7-44eb-8416-806097ba000e-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz\" (UID: \"4b1ce098-43e7-44eb-8416-806097ba000e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.024931 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4b1ce098-43e7-44eb-8416-806097ba000e-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz\" (UID: \"4b1ce098-43e7-44eb-8416-806097ba000e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.127123 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4b1ce098-43e7-44eb-8416-806097ba000e-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz\" (UID: \"4b1ce098-43e7-44eb-8416-806097ba000e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.127262 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4b1ce098-43e7-44eb-8416-806097ba000e-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz\" (UID: \"4b1ce098-43e7-44eb-8416-806097ba000e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.127343 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4k6ns\" (UniqueName: \"kubernetes.io/projected/4b1ce098-43e7-44eb-8416-806097ba000e-kube-api-access-4k6ns\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz\" (UID: \"4b1ce098-43e7-44eb-8416-806097ba000e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.130392 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4b1ce098-43e7-44eb-8416-806097ba000e-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz\" (UID: \"4b1ce098-43e7-44eb-8416-806097ba000e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.131160 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4b1ce098-43e7-44eb-8416-806097ba000e-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz\" (UID: \"4b1ce098-43e7-44eb-8416-806097ba000e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.147652 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4k6ns\" (UniqueName: \"kubernetes.io/projected/4b1ce098-43e7-44eb-8416-806097ba000e-kube-api-access-4k6ns\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz\" (UID: \"4b1ce098-43e7-44eb-8416-806097ba000e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.323903 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:35 crc kubenswrapper[4693]: I1008 07:44:35.932105 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz"] Oct 08 07:44:36 crc kubenswrapper[4693]: I1008 07:44:36.916668 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" event={"ID":"4b1ce098-43e7-44eb-8416-806097ba000e","Type":"ContainerStarted","Data":"4349001b95cd527aa7580588444f46a8b1c3ad0f6f0ab758bc7bb6b17b8eb519"} Oct 08 07:44:37 crc kubenswrapper[4693]: I1008 07:44:37.929742 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" event={"ID":"4b1ce098-43e7-44eb-8416-806097ba000e","Type":"ContainerStarted","Data":"9c25a2de4d5e76cb922f41ed08e0f8a3bdb3d9998d1dc809f3deeb35f658f7db"} Oct 08 07:44:37 crc kubenswrapper[4693]: I1008 07:44:37.953858 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" podStartSLOduration=3.225384506 podStartE2EDuration="3.953834392s" podCreationTimestamp="2025-10-08 07:44:34 +0000 UTC" firstStartedPulling="2025-10-08 07:44:35.939805993 +0000 UTC m=+1661.310770928" lastFinishedPulling="2025-10-08 07:44:36.668255829 +0000 UTC m=+1662.039220814" observedRunningTime="2025-10-08 07:44:37.949201682 +0000 UTC m=+1663.320166657" watchObservedRunningTime="2025-10-08 07:44:37.953834392 +0000 UTC m=+1663.324799327" Oct 08 07:44:39 crc kubenswrapper[4693]: I1008 07:44:39.364910 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:44:39 crc kubenswrapper[4693]: E1008 07:44:39.365510 4693 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:44:42 crc kubenswrapper[4693]: I1008 07:44:42.981736 4693 generic.go:334] "Generic (PLEG): container finished" podID="4b1ce098-43e7-44eb-8416-806097ba000e" containerID="9c25a2de4d5e76cb922f41ed08e0f8a3bdb3d9998d1dc809f3deeb35f658f7db" exitCode=0 Oct 08 07:44:42 crc kubenswrapper[4693]: I1008 07:44:42.981851 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" event={"ID":"4b1ce098-43e7-44eb-8416-806097ba000e","Type":"ContainerDied","Data":"9c25a2de4d5e76cb922f41ed08e0f8a3bdb3d9998d1dc809f3deeb35f658f7db"} Oct 08 07:44:44 crc kubenswrapper[4693]: I1008 07:44:44.481803 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:44 crc kubenswrapper[4693]: I1008 07:44:44.615053 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4b1ce098-43e7-44eb-8416-806097ba000e-inventory\") pod \"4b1ce098-43e7-44eb-8416-806097ba000e\" (UID: \"4b1ce098-43e7-44eb-8416-806097ba000e\") " Oct 08 07:44:44 crc kubenswrapper[4693]: I1008 07:44:44.615674 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4b1ce098-43e7-44eb-8416-806097ba000e-ssh-key\") pod \"4b1ce098-43e7-44eb-8416-806097ba000e\" (UID: \"4b1ce098-43e7-44eb-8416-806097ba000e\") " Oct 08 07:44:44 crc kubenswrapper[4693]: I1008 07:44:44.615786 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4k6ns\" (UniqueName: \"kubernetes.io/projected/4b1ce098-43e7-44eb-8416-806097ba000e-kube-api-access-4k6ns\") pod \"4b1ce098-43e7-44eb-8416-806097ba000e\" (UID: \"4b1ce098-43e7-44eb-8416-806097ba000e\") " Oct 08 07:44:44 crc kubenswrapper[4693]: I1008 07:44:44.621477 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b1ce098-43e7-44eb-8416-806097ba000e-kube-api-access-4k6ns" (OuterVolumeSpecName: "kube-api-access-4k6ns") pod "4b1ce098-43e7-44eb-8416-806097ba000e" (UID: "4b1ce098-43e7-44eb-8416-806097ba000e"). InnerVolumeSpecName "kube-api-access-4k6ns". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:44:44 crc kubenswrapper[4693]: I1008 07:44:44.654028 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b1ce098-43e7-44eb-8416-806097ba000e-inventory" (OuterVolumeSpecName: "inventory") pod "4b1ce098-43e7-44eb-8416-806097ba000e" (UID: "4b1ce098-43e7-44eb-8416-806097ba000e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:44:44 crc kubenswrapper[4693]: I1008 07:44:44.664315 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b1ce098-43e7-44eb-8416-806097ba000e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4b1ce098-43e7-44eb-8416-806097ba000e" (UID: "4b1ce098-43e7-44eb-8416-806097ba000e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:44:44 crc kubenswrapper[4693]: I1008 07:44:44.717740 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4k6ns\" (UniqueName: \"kubernetes.io/projected/4b1ce098-43e7-44eb-8416-806097ba000e-kube-api-access-4k6ns\") on node \"crc\" DevicePath \"\"" Oct 08 07:44:44 crc kubenswrapper[4693]: I1008 07:44:44.717769 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4b1ce098-43e7-44eb-8416-806097ba000e-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 07:44:44 crc kubenswrapper[4693]: I1008 07:44:44.717783 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4b1ce098-43e7-44eb-8416-806097ba000e-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.001466 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" event={"ID":"4b1ce098-43e7-44eb-8416-806097ba000e","Type":"ContainerDied","Data":"4349001b95cd527aa7580588444f46a8b1c3ad0f6f0ab758bc7bb6b17b8eb519"} Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.001503 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4349001b95cd527aa7580588444f46a8b1c3ad0f6f0ab758bc7bb6b17b8eb519" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.001559 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.107342 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs"] Oct 08 07:44:45 crc kubenswrapper[4693]: E1008 07:44:45.107808 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b1ce098-43e7-44eb-8416-806097ba000e" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.107844 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b1ce098-43e7-44eb-8416-806097ba000e" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.108206 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b1ce098-43e7-44eb-8416-806097ba000e" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.109260 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.115209 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.115402 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.115640 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.117222 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.119991 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs"] Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.147019 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8c4dd95-f13a-4479-99a8-9ea12766ac48-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8r5hs\" (UID: \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.147141 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4r4k\" (UniqueName: \"kubernetes.io/projected/e8c4dd95-f13a-4479-99a8-9ea12766ac48-kube-api-access-d4r4k\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8r5hs\" (UID: \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.147992 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8c4dd95-f13a-4479-99a8-9ea12766ac48-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8r5hs\" (UID: \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.248545 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4r4k\" (UniqueName: \"kubernetes.io/projected/e8c4dd95-f13a-4479-99a8-9ea12766ac48-kube-api-access-d4r4k\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8r5hs\" (UID: \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.248635 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8c4dd95-f13a-4479-99a8-9ea12766ac48-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8r5hs\" (UID: \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.248686 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8c4dd95-f13a-4479-99a8-9ea12766ac48-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8r5hs\" (UID: 
\"e8c4dd95-f13a-4479-99a8-9ea12766ac48\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.255984 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8c4dd95-f13a-4479-99a8-9ea12766ac48-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8r5hs\" (UID: \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.256317 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8c4dd95-f13a-4479-99a8-9ea12766ac48-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8r5hs\" (UID: \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.266343 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4r4k\" (UniqueName: \"kubernetes.io/projected/e8c4dd95-f13a-4479-99a8-9ea12766ac48-kube-api-access-d4r4k\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8r5hs\" (UID: \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.433923 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:44:45 crc kubenswrapper[4693]: I1008 07:44:45.990152 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs"] Oct 08 07:44:46 crc kubenswrapper[4693]: I1008 07:44:46.013869 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" event={"ID":"e8c4dd95-f13a-4479-99a8-9ea12766ac48","Type":"ContainerStarted","Data":"0c26578b141fe4477241e88b43370a3835ab85f1d3c3ad510909f54d846ba722"} Oct 08 07:44:47 crc kubenswrapper[4693]: I1008 07:44:47.023048 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" event={"ID":"e8c4dd95-f13a-4479-99a8-9ea12766ac48","Type":"ContainerStarted","Data":"ac02396a85c08477ecc9e9c75404d4e16a7939d068086c8a11d716473cb91616"} Oct 08 07:44:50 crc kubenswrapper[4693]: I1008 07:44:50.042014 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" podStartSLOduration=4.587896576 podStartE2EDuration="5.04199604s" podCreationTimestamp="2025-10-08 07:44:45 +0000 UTC" firstStartedPulling="2025-10-08 07:44:45.990534215 +0000 UTC m=+1671.361515890" lastFinishedPulling="2025-10-08 07:44:46.444650419 +0000 UTC m=+1671.815615354" observedRunningTime="2025-10-08 07:44:47.04018883 +0000 UTC m=+1672.411153775" watchObservedRunningTime="2025-10-08 07:44:50.04199604 +0000 UTC m=+1675.412960975" Oct 08 07:44:50 crc kubenswrapper[4693]: I1008 07:44:50.047920 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-v5zq8"] Oct 08 07:44:50 crc kubenswrapper[4693]: I1008 07:44:50.064893 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-8wxwp"] Oct 08 07:44:50 crc kubenswrapper[4693]: I1008 07:44:50.074508 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-cell1-db-create-mg9lm"] Oct 08 07:44:50 crc kubenswrapper[4693]: I1008 07:44:50.091604 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-v5zq8"] Oct 08 07:44:50 crc kubenswrapper[4693]: I1008 07:44:50.101506 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-8wxwp"] Oct 08 07:44:50 crc kubenswrapper[4693]: I1008 07:44:50.109727 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-mg9lm"] Oct 08 07:44:51 crc kubenswrapper[4693]: I1008 07:44:51.372760 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77f1e46b-e47e-4b72-8e99-bffa5f5de30d" path="/var/lib/kubelet/pods/77f1e46b-e47e-4b72-8e99-bffa5f5de30d/volumes" Oct 08 07:44:51 crc kubenswrapper[4693]: I1008 07:44:51.373256 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82ff1555-b253-4980-afa9-ed42d9938ab7" path="/var/lib/kubelet/pods/82ff1555-b253-4980-afa9-ed42d9938ab7/volumes" Oct 08 07:44:51 crc kubenswrapper[4693]: I1008 07:44:51.373717 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9942b69-54ad-4bf9-980b-45255f2e31a5" path="/var/lib/kubelet/pods/e9942b69-54ad-4bf9-980b-45255f2e31a5/volumes" Oct 08 07:44:53 crc kubenswrapper[4693]: I1008 07:44:53.363234 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:44:53 crc kubenswrapper[4693]: E1008 07:44:53.364211 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.147006 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z"] Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.148749 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.153442 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.153893 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.174794 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z"] Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.259031 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-config-volume\") pod \"collect-profiles-29331825-dds8z\" (UID: \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.259117 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m49zt\" (UniqueName: \"kubernetes.io/projected/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-kube-api-access-m49zt\") pod \"collect-profiles-29331825-dds8z\" (UID: \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.259202 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-secret-volume\") pod \"collect-profiles-29331825-dds8z\" (UID: \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.361741 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-config-volume\") pod \"collect-profiles-29331825-dds8z\" (UID: \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.361871 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m49zt\" (UniqueName: \"kubernetes.io/projected/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-kube-api-access-m49zt\") pod \"collect-profiles-29331825-dds8z\" (UID: \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.361942 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-secret-volume\") pod \"collect-profiles-29331825-dds8z\" (UID: \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.363101 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-config-volume\") pod 
\"collect-profiles-29331825-dds8z\" (UID: \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.371076 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-secret-volume\") pod \"collect-profiles-29331825-dds8z\" (UID: \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.381367 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m49zt\" (UniqueName: \"kubernetes.io/projected/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-kube-api-access-m49zt\") pod \"collect-profiles-29331825-dds8z\" (UID: \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.475535 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:00 crc kubenswrapper[4693]: I1008 07:45:00.937894 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z"] Oct 08 07:45:00 crc kubenswrapper[4693]: W1008 07:45:00.939408 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1ce5d02_f32e_4504_96a2_0e6eb1bb232e.slice/crio-9ff1cd42fa14c791edce407b2ce92b45c70e7f31363551da2d60c69bcc6a805b WatchSource:0}: Error finding container 9ff1cd42fa14c791edce407b2ce92b45c70e7f31363551da2d60c69bcc6a805b: Status 404 returned error can't find the container with id 9ff1cd42fa14c791edce407b2ce92b45c70e7f31363551da2d60c69bcc6a805b Oct 08 07:45:01 crc kubenswrapper[4693]: I1008 07:45:01.177651 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" event={"ID":"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e","Type":"ContainerStarted","Data":"8d6da0e8c409e9a2862db50d249000031a8cbd25db94958d3f2ebfae94fdd890"} Oct 08 07:45:01 crc kubenswrapper[4693]: I1008 07:45:01.177961 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" event={"ID":"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e","Type":"ContainerStarted","Data":"9ff1cd42fa14c791edce407b2ce92b45c70e7f31363551da2d60c69bcc6a805b"} Oct 08 07:45:01 crc kubenswrapper[4693]: I1008 07:45:01.197369 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" podStartSLOduration=1.197352335 podStartE2EDuration="1.197352335s" podCreationTimestamp="2025-10-08 07:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 07:45:01.193261009 +0000 UTC m=+1686.564225944" watchObservedRunningTime="2025-10-08 07:45:01.197352335 +0000 UTC m=+1686.568317270" Oct 08 07:45:02 crc kubenswrapper[4693]: I1008 07:45:02.219561 4693 generic.go:334] "Generic (PLEG): container finished" podID="d1ce5d02-f32e-4504-96a2-0e6eb1bb232e" containerID="8d6da0e8c409e9a2862db50d249000031a8cbd25db94958d3f2ebfae94fdd890" exitCode=0 Oct 08 07:45:02 crc kubenswrapper[4693]: I1008 07:45:02.219630 
4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" event={"ID":"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e","Type":"ContainerDied","Data":"8d6da0e8c409e9a2862db50d249000031a8cbd25db94958d3f2ebfae94fdd890"} Oct 08 07:45:03 crc kubenswrapper[4693]: I1008 07:45:03.564607 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:03 crc kubenswrapper[4693]: I1008 07:45:03.635630 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-config-volume\") pod \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\" (UID: \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\") " Oct 08 07:45:03 crc kubenswrapper[4693]: I1008 07:45:03.635784 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m49zt\" (UniqueName: \"kubernetes.io/projected/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-kube-api-access-m49zt\") pod \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\" (UID: \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\") " Oct 08 07:45:03 crc kubenswrapper[4693]: I1008 07:45:03.635849 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-secret-volume\") pod \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\" (UID: \"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e\") " Oct 08 07:45:03 crc kubenswrapper[4693]: I1008 07:45:03.636498 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-config-volume" (OuterVolumeSpecName: "config-volume") pod "d1ce5d02-f32e-4504-96a2-0e6eb1bb232e" (UID: "d1ce5d02-f32e-4504-96a2-0e6eb1bb232e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 07:45:03 crc kubenswrapper[4693]: I1008 07:45:03.642155 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-kube-api-access-m49zt" (OuterVolumeSpecName: "kube-api-access-m49zt") pod "d1ce5d02-f32e-4504-96a2-0e6eb1bb232e" (UID: "d1ce5d02-f32e-4504-96a2-0e6eb1bb232e"). InnerVolumeSpecName "kube-api-access-m49zt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:45:03 crc kubenswrapper[4693]: I1008 07:45:03.643488 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d1ce5d02-f32e-4504-96a2-0e6eb1bb232e" (UID: "d1ce5d02-f32e-4504-96a2-0e6eb1bb232e"). InnerVolumeSpecName "secret-volume". 
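[note] Every kubenswrapper payload above uses the klog header layout Lmmdd hh:mm:ss.uuuuuu threadid file:line] msg, where the leading letter is the severity: I for info, W for warnings like the manager.go watch-event 404 just above (typically a startup race between the cgroup watch and the runtime's view of the container, harmless when the pod goes on to run), E for errors like the CrashLoopBackOff syncs. A header splitter makes the stream easy to slice by severity or source file (sketch):

    import re

    KLOG = re.compile(
        r'(?P<sev>[IWEF])(?P<mmdd>\d{4}) (?P<time>\d{2}:\d{2}:\d{2}\.\d{6}) '
        r'(?P<tid>\d+) (?P<file>[\w.]+):(?P<line>\d+)\] (?P<msg>.*)'
    )

    def by_severity(lines):
        """Count klog records per severity letter."""
        counts = {}
        for line in lines:
            m = KLOG.search(line)
            if m:
                counts[m.group("sev")] = counts.get(m.group("sev"), 0) + 1
        return counts

In this section the counts are overwhelmingly I, with a single W and a steady drip of E entries (the CrashLoopBackOff syncs from pod_workers.go:1301 plus the RemoveStaleState notices from cpu_manager.go:410).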
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:45:03 crc kubenswrapper[4693]: I1008 07:45:03.737889 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m49zt\" (UniqueName: \"kubernetes.io/projected/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-kube-api-access-m49zt\") on node \"crc\" DevicePath \"\"" Oct 08 07:45:03 crc kubenswrapper[4693]: I1008 07:45:03.737924 4693 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 08 07:45:03 crc kubenswrapper[4693]: I1008 07:45:03.737935 4693 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d1ce5d02-f32e-4504-96a2-0e6eb1bb232e-config-volume\") on node \"crc\" DevicePath \"\"" Oct 08 07:45:04 crc kubenswrapper[4693]: I1008 07:45:04.247208 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" event={"ID":"d1ce5d02-f32e-4504-96a2-0e6eb1bb232e","Type":"ContainerDied","Data":"9ff1cd42fa14c791edce407b2ce92b45c70e7f31363551da2d60c69bcc6a805b"} Oct 08 07:45:04 crc kubenswrapper[4693]: I1008 07:45:04.247258 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ff1cd42fa14c791edce407b2ce92b45c70e7f31363551da2d60c69bcc6a805b" Oct 08 07:45:04 crc kubenswrapper[4693]: I1008 07:45:04.247328 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331825-dds8z" Oct 08 07:45:05 crc kubenswrapper[4693]: I1008 07:45:05.046672 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-8a18-account-create-5qb8q"] Oct 08 07:45:05 crc kubenswrapper[4693]: I1008 07:45:05.057473 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-9fa1-account-create-f6sgs"] Oct 08 07:45:05 crc kubenswrapper[4693]: I1008 07:45:05.072639 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-9fa1-account-create-f6sgs"] Oct 08 07:45:05 crc kubenswrapper[4693]: I1008 07:45:05.080544 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-8a18-account-create-5qb8q"] Oct 08 07:45:05 crc kubenswrapper[4693]: I1008 07:45:05.090723 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-bd05-account-create-vtqfj"] Oct 08 07:45:05 crc kubenswrapper[4693]: I1008 07:45:05.097940 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-bd05-account-create-vtqfj"] Oct 08 07:45:05 crc kubenswrapper[4693]: I1008 07:45:05.381925 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f2182c2-a3ca-408b-9a70-876c78d382e4" path="/var/lib/kubelet/pods/5f2182c2-a3ca-408b-9a70-876c78d382e4/volumes" Oct 08 07:45:05 crc kubenswrapper[4693]: I1008 07:45:05.383446 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6477ef9b-7553-43d9-ae63-d705ffad2213" path="/var/lib/kubelet/pods/6477ef9b-7553-43d9-ae63-d705ffad2213/volumes" Oct 08 07:45:05 crc kubenswrapper[4693]: I1008 07:45:05.384924 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5e77527-c5f6-4ec3-8a18-b2ec14900f8f" path="/var/lib/kubelet/pods/e5e77527-c5f6-4ec3-8a18-b2ec14900f8f/volumes" Oct 08 07:45:07 crc kubenswrapper[4693]: I1008 07:45:07.364074 4693 scope.go:117] "RemoveContainer" 
containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:45:07 crc kubenswrapper[4693]: E1008 07:45:07.364566 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:45:18 crc kubenswrapper[4693]: I1008 07:45:18.137261 4693 scope.go:117] "RemoveContainer" containerID="dedcbd4b017d22787241d60debd5f0f0e6fa2a7ef1c79674eea458c142ad9da1" Oct 08 07:45:18 crc kubenswrapper[4693]: I1008 07:45:18.187859 4693 scope.go:117] "RemoveContainer" containerID="3045375e561941cbe2f47c2d4cd049225f160b6869463480e2d0725249d1ebdb" Oct 08 07:45:18 crc kubenswrapper[4693]: I1008 07:45:18.234638 4693 scope.go:117] "RemoveContainer" containerID="01c447124f95edb09201e26ba5e748f3e1601b1d4adc8c6a08bfa90aa19f0b36" Oct 08 07:45:18 crc kubenswrapper[4693]: I1008 07:45:18.292895 4693 scope.go:117] "RemoveContainer" containerID="bcec3ce3e35c44436bd8067fbfd3bb339af7a665c51d5667a115ae263a971667" Oct 08 07:45:18 crc kubenswrapper[4693]: I1008 07:45:18.332519 4693 scope.go:117] "RemoveContainer" containerID="5e06bceb874defda4b53eb91fb0003f2718170972f149a667b6a3e2f1331fa09" Oct 08 07:45:18 crc kubenswrapper[4693]: I1008 07:45:18.362701 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:45:18 crc kubenswrapper[4693]: E1008 07:45:18.363043 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:45:18 crc kubenswrapper[4693]: I1008 07:45:18.396130 4693 scope.go:117] "RemoveContainer" containerID="9441edb5fda88749d2822abe6a39380dd6c6b29bb7821f84e912151952e6e632" Oct 08 07:45:27 crc kubenswrapper[4693]: I1008 07:45:27.063537 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n9c4r"] Oct 08 07:45:27 crc kubenswrapper[4693]: I1008 07:45:27.075399 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n9c4r"] Oct 08 07:45:27 crc kubenswrapper[4693]: I1008 07:45:27.377542 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3" path="/var/lib/kubelet/pods/6c54d1ab-4c95-4e0c-96c5-d5bef5310ef3/volumes" Oct 08 07:45:30 crc kubenswrapper[4693]: I1008 07:45:30.502744 4693 generic.go:334] "Generic (PLEG): container finished" podID="e8c4dd95-f13a-4479-99a8-9ea12766ac48" containerID="ac02396a85c08477ecc9e9c75404d4e16a7939d068086c8a11d716473cb91616" exitCode=0 Oct 08 07:45:30 crc kubenswrapper[4693]: I1008 07:45:30.502859 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" event={"ID":"e8c4dd95-f13a-4479-99a8-9ea12766ac48","Type":"ContainerDied","Data":"ac02396a85c08477ecc9e9c75404d4e16a7939d068086c8a11d716473cb91616"} Oct 08 07:45:31 crc kubenswrapper[4693]: I1008 
07:45:31.972324 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.093395 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4r4k\" (UniqueName: \"kubernetes.io/projected/e8c4dd95-f13a-4479-99a8-9ea12766ac48-kube-api-access-d4r4k\") pod \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\" (UID: \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\") " Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.093883 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8c4dd95-f13a-4479-99a8-9ea12766ac48-ssh-key\") pod \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\" (UID: \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\") " Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.094165 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8c4dd95-f13a-4479-99a8-9ea12766ac48-inventory\") pod \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\" (UID: \"e8c4dd95-f13a-4479-99a8-9ea12766ac48\") " Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.098595 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8c4dd95-f13a-4479-99a8-9ea12766ac48-kube-api-access-d4r4k" (OuterVolumeSpecName: "kube-api-access-d4r4k") pod "e8c4dd95-f13a-4479-99a8-9ea12766ac48" (UID: "e8c4dd95-f13a-4479-99a8-9ea12766ac48"). InnerVolumeSpecName "kube-api-access-d4r4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.123058 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8c4dd95-f13a-4479-99a8-9ea12766ac48-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e8c4dd95-f13a-4479-99a8-9ea12766ac48" (UID: "e8c4dd95-f13a-4479-99a8-9ea12766ac48"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.134959 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8c4dd95-f13a-4479-99a8-9ea12766ac48-inventory" (OuterVolumeSpecName: "inventory") pod "e8c4dd95-f13a-4479-99a8-9ea12766ac48" (UID: "e8c4dd95-f13a-4479-99a8-9ea12766ac48"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.196264 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4r4k\" (UniqueName: \"kubernetes.io/projected/e8c4dd95-f13a-4479-99a8-9ea12766ac48-kube-api-access-d4r4k\") on node \"crc\" DevicePath \"\"" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.196300 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8c4dd95-f13a-4479-99a8-9ea12766ac48-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.196309 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8c4dd95-f13a-4479-99a8-9ea12766ac48-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.363136 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:45:32 crc kubenswrapper[4693]: E1008 07:45:32.363527 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.523157 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" event={"ID":"e8c4dd95-f13a-4479-99a8-9ea12766ac48","Type":"ContainerDied","Data":"0c26578b141fe4477241e88b43370a3835ab85f1d3c3ad510909f54d846ba722"} Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.523211 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c26578b141fe4477241e88b43370a3835ab85f1d3c3ad510909f54d846ba722" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.523553 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8r5hs" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.631649 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b"] Oct 08 07:45:32 crc kubenswrapper[4693]: E1008 07:45:32.632073 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8c4dd95-f13a-4479-99a8-9ea12766ac48" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.632092 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8c4dd95-f13a-4479-99a8-9ea12766ac48" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:45:32 crc kubenswrapper[4693]: E1008 07:45:32.632106 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1ce5d02-f32e-4504-96a2-0e6eb1bb232e" containerName="collect-profiles" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.632112 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1ce5d02-f32e-4504-96a2-0e6eb1bb232e" containerName="collect-profiles" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.632300 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1ce5d02-f32e-4504-96a2-0e6eb1bb232e" containerName="collect-profiles" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.632314 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8c4dd95-f13a-4479-99a8-9ea12766ac48" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.633066 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.635446 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.635446 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.635966 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.636492 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.646604 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b"] Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.805774 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llfgr\" (UniqueName: \"kubernetes.io/projected/06bebec7-3818-42dc-b357-7ef2ea40a463-kube-api-access-llfgr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xf42b\" (UID: \"06bebec7-3818-42dc-b357-7ef2ea40a463\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.806833 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06bebec7-3818-42dc-b357-7ef2ea40a463-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xf42b\" (UID: \"06bebec7-3818-42dc-b357-7ef2ea40a463\") " 
pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.806861 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06bebec7-3818-42dc-b357-7ef2ea40a463-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xf42b\" (UID: \"06bebec7-3818-42dc-b357-7ef2ea40a463\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.909573 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llfgr\" (UniqueName: \"kubernetes.io/projected/06bebec7-3818-42dc-b357-7ef2ea40a463-kube-api-access-llfgr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xf42b\" (UID: \"06bebec7-3818-42dc-b357-7ef2ea40a463\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.909781 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06bebec7-3818-42dc-b357-7ef2ea40a463-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xf42b\" (UID: \"06bebec7-3818-42dc-b357-7ef2ea40a463\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.909850 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06bebec7-3818-42dc-b357-7ef2ea40a463-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xf42b\" (UID: \"06bebec7-3818-42dc-b357-7ef2ea40a463\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.917067 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06bebec7-3818-42dc-b357-7ef2ea40a463-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xf42b\" (UID: \"06bebec7-3818-42dc-b357-7ef2ea40a463\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.917654 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06bebec7-3818-42dc-b357-7ef2ea40a463-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xf42b\" (UID: \"06bebec7-3818-42dc-b357-7ef2ea40a463\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.939006 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llfgr\" (UniqueName: \"kubernetes.io/projected/06bebec7-3818-42dc-b357-7ef2ea40a463-kube-api-access-llfgr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xf42b\" (UID: \"06bebec7-3818-42dc-b357-7ef2ea40a463\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:45:32 crc kubenswrapper[4693]: I1008 07:45:32.953553 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:45:33 crc kubenswrapper[4693]: W1008 07:45:33.409402 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06bebec7_3818_42dc_b357_7ef2ea40a463.slice/crio-215396ec9c7fdf2a4e05a8495ed0dcaa08b46602ca14fc4cc60ffa3b5f942a32 WatchSource:0}: Error finding container 215396ec9c7fdf2a4e05a8495ed0dcaa08b46602ca14fc4cc60ffa3b5f942a32: Status 404 returned error can't find the container with id 215396ec9c7fdf2a4e05a8495ed0dcaa08b46602ca14fc4cc60ffa3b5f942a32 Oct 08 07:45:33 crc kubenswrapper[4693]: I1008 07:45:33.416449 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b"] Oct 08 07:45:33 crc kubenswrapper[4693]: I1008 07:45:33.532216 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" event={"ID":"06bebec7-3818-42dc-b357-7ef2ea40a463","Type":"ContainerStarted","Data":"215396ec9c7fdf2a4e05a8495ed0dcaa08b46602ca14fc4cc60ffa3b5f942a32"} Oct 08 07:45:34 crc kubenswrapper[4693]: I1008 07:45:34.566345 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" event={"ID":"06bebec7-3818-42dc-b357-7ef2ea40a463","Type":"ContainerStarted","Data":"403ef4ba0ab1eecb1bc31ceca81748bef3e34c056ff5bd069c4456e6886c69c0"} Oct 08 07:45:34 crc kubenswrapper[4693]: I1008 07:45:34.584489 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" podStartSLOduration=2.050081124 podStartE2EDuration="2.584464473s" podCreationTimestamp="2025-10-08 07:45:32 +0000 UTC" firstStartedPulling="2025-10-08 07:45:33.418488973 +0000 UTC m=+1718.789453918" lastFinishedPulling="2025-10-08 07:45:33.952872332 +0000 UTC m=+1719.323837267" observedRunningTime="2025-10-08 07:45:34.582657806 +0000 UTC m=+1719.953622751" watchObservedRunningTime="2025-10-08 07:45:34.584464473 +0000 UTC m=+1719.955429448" Oct 08 07:45:43 crc kubenswrapper[4693]: I1008 07:45:43.363909 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:45:43 crc kubenswrapper[4693]: E1008 07:45:43.364855 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:45:49 crc kubenswrapper[4693]: I1008 07:45:49.103346 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-69qw5"] Oct 08 07:45:49 crc kubenswrapper[4693]: I1008 07:45:49.115885 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-pp99v"] Oct 08 07:45:49 crc kubenswrapper[4693]: I1008 07:45:49.123068 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-69qw5"] Oct 08 07:45:49 crc kubenswrapper[4693]: I1008 07:45:49.132664 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-pp99v"] Oct 08 07:45:49 crc kubenswrapper[4693]: I1008 
07:45:49.376457 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7338453-7794-4bcd-8204-a189c93b7606" path="/var/lib/kubelet/pods/c7338453-7794-4bcd-8204-a189c93b7606/volumes" Oct 08 07:45:49 crc kubenswrapper[4693]: I1008 07:45:49.378151 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d" path="/var/lib/kubelet/pods/f7f1acbf-f6b8-48e3-9cb9-bb337fdd4b9d/volumes" Oct 08 07:45:57 crc kubenswrapper[4693]: I1008 07:45:57.363757 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:45:57 crc kubenswrapper[4693]: E1008 07:45:57.364779 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:46:11 crc kubenswrapper[4693]: I1008 07:46:11.364149 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:46:11 crc kubenswrapper[4693]: E1008 07:46:11.365266 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:46:18 crc kubenswrapper[4693]: I1008 07:46:18.536974 4693 scope.go:117] "RemoveContainer" containerID="b77adafe28931ba499f21cd52f784a5e97252fbad0dbe0f6bb6ff3b6e4d5debe" Oct 08 07:46:18 crc kubenswrapper[4693]: I1008 07:46:18.594002 4693 scope.go:117] "RemoveContainer" containerID="e7f620b66bdb8ff42d4a7936a5ea2870d6de64371eafab136fa9455050004390" Oct 08 07:46:18 crc kubenswrapper[4693]: I1008 07:46:18.705568 4693 scope.go:117] "RemoveContainer" containerID="4a42a01382abaa6f0eff26fd468b5aaf43f96272176431a500b6c92b5123b8be" Oct 08 07:46:26 crc kubenswrapper[4693]: I1008 07:46:26.362852 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503" Oct 08 07:46:27 crc kubenswrapper[4693]: I1008 07:46:27.114048 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"ebfdbc64dc1ffeb4225c54ca810a22cc10ba58b76c91b45301c1195e461b6b73"} Oct 08 07:46:33 crc kubenswrapper[4693]: I1008 07:46:33.055721 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-mtk9c"] Oct 08 07:46:33 crc kubenswrapper[4693]: I1008 07:46:33.065465 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-mtk9c"] Oct 08 07:46:33 crc kubenswrapper[4693]: I1008 07:46:33.176783 4693 generic.go:334] "Generic (PLEG): container finished" podID="06bebec7-3818-42dc-b357-7ef2ea40a463" containerID="403ef4ba0ab1eecb1bc31ceca81748bef3e34c056ff5bd069c4456e6886c69c0" exitCode=2 Oct 08 07:46:33 crc kubenswrapper[4693]: I1008 07:46:33.176851 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" event={"ID":"06bebec7-3818-42dc-b357-7ef2ea40a463","Type":"ContainerDied","Data":"403ef4ba0ab1eecb1bc31ceca81748bef3e34c056ff5bd069c4456e6886c69c0"} Oct 08 07:46:33 crc kubenswrapper[4693]: I1008 07:46:33.377368 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a7c286f-f170-49ca-a7e6-9e4c411d4840" path="/var/lib/kubelet/pods/8a7c286f-f170-49ca-a7e6-9e4c411d4840/volumes" Oct 08 07:46:34 crc kubenswrapper[4693]: I1008 07:46:34.669305 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:46:34 crc kubenswrapper[4693]: I1008 07:46:34.824569 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llfgr\" (UniqueName: \"kubernetes.io/projected/06bebec7-3818-42dc-b357-7ef2ea40a463-kube-api-access-llfgr\") pod \"06bebec7-3818-42dc-b357-7ef2ea40a463\" (UID: \"06bebec7-3818-42dc-b357-7ef2ea40a463\") " Oct 08 07:46:34 crc kubenswrapper[4693]: I1008 07:46:34.824667 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06bebec7-3818-42dc-b357-7ef2ea40a463-inventory\") pod \"06bebec7-3818-42dc-b357-7ef2ea40a463\" (UID: \"06bebec7-3818-42dc-b357-7ef2ea40a463\") " Oct 08 07:46:34 crc kubenswrapper[4693]: I1008 07:46:34.824878 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06bebec7-3818-42dc-b357-7ef2ea40a463-ssh-key\") pod \"06bebec7-3818-42dc-b357-7ef2ea40a463\" (UID: \"06bebec7-3818-42dc-b357-7ef2ea40a463\") " Oct 08 07:46:34 crc kubenswrapper[4693]: I1008 07:46:34.832461 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06bebec7-3818-42dc-b357-7ef2ea40a463-kube-api-access-llfgr" (OuterVolumeSpecName: "kube-api-access-llfgr") pod "06bebec7-3818-42dc-b357-7ef2ea40a463" (UID: "06bebec7-3818-42dc-b357-7ef2ea40a463"). InnerVolumeSpecName "kube-api-access-llfgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:46:34 crc kubenswrapper[4693]: I1008 07:46:34.857899 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06bebec7-3818-42dc-b357-7ef2ea40a463-inventory" (OuterVolumeSpecName: "inventory") pod "06bebec7-3818-42dc-b357-7ef2ea40a463" (UID: "06bebec7-3818-42dc-b357-7ef2ea40a463"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:46:34 crc kubenswrapper[4693]: I1008 07:46:34.879911 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06bebec7-3818-42dc-b357-7ef2ea40a463-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "06bebec7-3818-42dc-b357-7ef2ea40a463" (UID: "06bebec7-3818-42dc-b357-7ef2ea40a463"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:46:34 crc kubenswrapper[4693]: I1008 07:46:34.927235 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06bebec7-3818-42dc-b357-7ef2ea40a463-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:46:34 crc kubenswrapper[4693]: I1008 07:46:34.927277 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llfgr\" (UniqueName: \"kubernetes.io/projected/06bebec7-3818-42dc-b357-7ef2ea40a463-kube-api-access-llfgr\") on node \"crc\" DevicePath \"\"" Oct 08 07:46:34 crc kubenswrapper[4693]: I1008 07:46:34.927294 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06bebec7-3818-42dc-b357-7ef2ea40a463-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 07:46:35 crc kubenswrapper[4693]: I1008 07:46:35.197159 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" event={"ID":"06bebec7-3818-42dc-b357-7ef2ea40a463","Type":"ContainerDied","Data":"215396ec9c7fdf2a4e05a8495ed0dcaa08b46602ca14fc4cc60ffa3b5f942a32"} Oct 08 07:46:35 crc kubenswrapper[4693]: I1008 07:46:35.197454 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="215396ec9c7fdf2a4e05a8495ed0dcaa08b46602ca14fc4cc60ffa3b5f942a32" Oct 08 07:46:35 crc kubenswrapper[4693]: I1008 07:46:35.197260 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xf42b" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.041001 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd"] Oct 08 07:46:42 crc kubenswrapper[4693]: E1008 07:46:42.042593 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06bebec7-3818-42dc-b357-7ef2ea40a463" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.042626 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="06bebec7-3818-42dc-b357-7ef2ea40a463" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.043154 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="06bebec7-3818-42dc-b357-7ef2ea40a463" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.044652 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.047554 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.047912 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.054940 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.055141 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.058101 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd"] Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.186327 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eecc70b9-2687-499e-89e1-f2346e8088f6-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd\" (UID: \"eecc70b9-2687-499e-89e1-f2346e8088f6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.186423 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eecc70b9-2687-499e-89e1-f2346e8088f6-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd\" (UID: \"eecc70b9-2687-499e-89e1-f2346e8088f6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.186494 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f72jk\" (UniqueName: \"kubernetes.io/projected/eecc70b9-2687-499e-89e1-f2346e8088f6-kube-api-access-f72jk\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd\" (UID: \"eecc70b9-2687-499e-89e1-f2346e8088f6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.288428 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eecc70b9-2687-499e-89e1-f2346e8088f6-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd\" (UID: \"eecc70b9-2687-499e-89e1-f2346e8088f6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.288511 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eecc70b9-2687-499e-89e1-f2346e8088f6-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd\" (UID: \"eecc70b9-2687-499e-89e1-f2346e8088f6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.288581 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f72jk\" (UniqueName: \"kubernetes.io/projected/eecc70b9-2687-499e-89e1-f2346e8088f6-kube-api-access-f72jk\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd\" 
(UID: \"eecc70b9-2687-499e-89e1-f2346e8088f6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.294575 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eecc70b9-2687-499e-89e1-f2346e8088f6-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd\" (UID: \"eecc70b9-2687-499e-89e1-f2346e8088f6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.294595 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eecc70b9-2687-499e-89e1-f2346e8088f6-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd\" (UID: \"eecc70b9-2687-499e-89e1-f2346e8088f6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.304680 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f72jk\" (UniqueName: \"kubernetes.io/projected/eecc70b9-2687-499e-89e1-f2346e8088f6-kube-api-access-f72jk\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd\" (UID: \"eecc70b9-2687-499e-89e1-f2346e8088f6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.388523 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:46:42 crc kubenswrapper[4693]: I1008 07:46:42.940400 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd"] Oct 08 07:46:42 crc kubenswrapper[4693]: W1008 07:46:42.945027 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeecc70b9_2687_499e_89e1_f2346e8088f6.slice/crio-1f341d0a9d89c69b02a055a2b96180485e011d9e2a4b70f93b475a45a89c018c WatchSource:0}: Error finding container 1f341d0a9d89c69b02a055a2b96180485e011d9e2a4b70f93b475a45a89c018c: Status 404 returned error can't find the container with id 1f341d0a9d89c69b02a055a2b96180485e011d9e2a4b70f93b475a45a89c018c Oct 08 07:46:43 crc kubenswrapper[4693]: I1008 07:46:43.289392 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" event={"ID":"eecc70b9-2687-499e-89e1-f2346e8088f6","Type":"ContainerStarted","Data":"1f341d0a9d89c69b02a055a2b96180485e011d9e2a4b70f93b475a45a89c018c"} Oct 08 07:46:44 crc kubenswrapper[4693]: I1008 07:46:44.318388 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" event={"ID":"eecc70b9-2687-499e-89e1-f2346e8088f6","Type":"ContainerStarted","Data":"14dc903f96753cb126455b7aca349919de474c02456d322ac61454bc9c1b125d"} Oct 08 07:46:44 crc kubenswrapper[4693]: I1008 07:46:44.357505 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" podStartSLOduration=1.6351093639999998 podStartE2EDuration="2.357476779s" podCreationTimestamp="2025-10-08 07:46:42 +0000 UTC" firstStartedPulling="2025-10-08 07:46:42.947388237 +0000 UTC m=+1788.318353182" lastFinishedPulling="2025-10-08 07:46:43.669755622 +0000 UTC m=+1789.040720597" observedRunningTime="2025-10-08 
07:46:44.342410208 +0000 UTC m=+1789.713375143" watchObservedRunningTime="2025-10-08 07:46:44.357476779 +0000 UTC m=+1789.728441744" Oct 08 07:47:18 crc kubenswrapper[4693]: I1008 07:47:18.796842 4693 scope.go:117] "RemoveContainer" containerID="cef8afda4aec447b6f3834ebd155e491e040a40f584e5d1a87dbb6d1624c6eb2" Oct 08 07:47:35 crc kubenswrapper[4693]: I1008 07:47:35.888397 4693 generic.go:334] "Generic (PLEG): container finished" podID="eecc70b9-2687-499e-89e1-f2346e8088f6" containerID="14dc903f96753cb126455b7aca349919de474c02456d322ac61454bc9c1b125d" exitCode=0 Oct 08 07:47:35 crc kubenswrapper[4693]: I1008 07:47:35.888476 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" event={"ID":"eecc70b9-2687-499e-89e1-f2346e8088f6","Type":"ContainerDied","Data":"14dc903f96753cb126455b7aca349919de474c02456d322ac61454bc9c1b125d"} Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.368958 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.477365 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f72jk\" (UniqueName: \"kubernetes.io/projected/eecc70b9-2687-499e-89e1-f2346e8088f6-kube-api-access-f72jk\") pod \"eecc70b9-2687-499e-89e1-f2346e8088f6\" (UID: \"eecc70b9-2687-499e-89e1-f2346e8088f6\") " Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.477427 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eecc70b9-2687-499e-89e1-f2346e8088f6-inventory\") pod \"eecc70b9-2687-499e-89e1-f2346e8088f6\" (UID: \"eecc70b9-2687-499e-89e1-f2346e8088f6\") " Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.477474 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eecc70b9-2687-499e-89e1-f2346e8088f6-ssh-key\") pod \"eecc70b9-2687-499e-89e1-f2346e8088f6\" (UID: \"eecc70b9-2687-499e-89e1-f2346e8088f6\") " Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.492987 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eecc70b9-2687-499e-89e1-f2346e8088f6-kube-api-access-f72jk" (OuterVolumeSpecName: "kube-api-access-f72jk") pod "eecc70b9-2687-499e-89e1-f2346e8088f6" (UID: "eecc70b9-2687-499e-89e1-f2346e8088f6"). InnerVolumeSpecName "kube-api-access-f72jk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.503613 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eecc70b9-2687-499e-89e1-f2346e8088f6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "eecc70b9-2687-499e-89e1-f2346e8088f6" (UID: "eecc70b9-2687-499e-89e1-f2346e8088f6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.504832 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eecc70b9-2687-499e-89e1-f2346e8088f6-inventory" (OuterVolumeSpecName: "inventory") pod "eecc70b9-2687-499e-89e1-f2346e8088f6" (UID: "eecc70b9-2687-499e-89e1-f2346e8088f6"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.591442 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eecc70b9-2687-499e-89e1-f2346e8088f6-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.591492 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f72jk\" (UniqueName: \"kubernetes.io/projected/eecc70b9-2687-499e-89e1-f2346e8088f6-kube-api-access-f72jk\") on node \"crc\" DevicePath \"\"" Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.591508 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eecc70b9-2687-499e-89e1-f2346e8088f6-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.913063 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" event={"ID":"eecc70b9-2687-499e-89e1-f2346e8088f6","Type":"ContainerDied","Data":"1f341d0a9d89c69b02a055a2b96180485e011d9e2a4b70f93b475a45a89c018c"} Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.913137 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f341d0a9d89c69b02a055a2b96180485e011d9e2a4b70f93b475a45a89c018c" Oct 08 07:47:37 crc kubenswrapper[4693]: I1008 07:47:37.913141 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.011486 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-dj5jp"] Oct 08 07:47:38 crc kubenswrapper[4693]: E1008 07:47:38.011939 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eecc70b9-2687-499e-89e1-f2346e8088f6" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.011961 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="eecc70b9-2687-499e-89e1-f2346e8088f6" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.012201 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="eecc70b9-2687-499e-89e1-f2346e8088f6" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.013055 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.014862 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.014959 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.015805 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.026397 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.029539 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-dj5jp"] Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.101931 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-dj5jp\" (UID: \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\") " pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.102287 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-dj5jp\" (UID: \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\") " pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.102342 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlsbt\" (UniqueName: \"kubernetes.io/projected/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-kube-api-access-dlsbt\") pod \"ssh-known-hosts-edpm-deployment-dj5jp\" (UID: \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\") " pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.204354 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-dj5jp\" (UID: \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\") " pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.204482 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlsbt\" (UniqueName: \"kubernetes.io/projected/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-kube-api-access-dlsbt\") pod \"ssh-known-hosts-edpm-deployment-dj5jp\" (UID: \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\") " pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.204660 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-dj5jp\" (UID: \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\") " pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:38 crc 
kubenswrapper[4693]: I1008 07:47:38.211331 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-dj5jp\" (UID: \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\") " pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.215003 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-dj5jp\" (UID: \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\") " pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.237620 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlsbt\" (UniqueName: \"kubernetes.io/projected/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-kube-api-access-dlsbt\") pod \"ssh-known-hosts-edpm-deployment-dj5jp\" (UID: \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\") " pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.355107 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:38 crc kubenswrapper[4693]: I1008 07:47:38.910179 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-dj5jp"] Oct 08 07:47:39 crc kubenswrapper[4693]: I1008 07:47:39.940068 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" event={"ID":"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399","Type":"ContainerStarted","Data":"c57928666e1eecc5c003252696194f69310c086946ee9497b3aba570fcb1db07"} Oct 08 07:47:39 crc kubenswrapper[4693]: I1008 07:47:39.940435 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" event={"ID":"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399","Type":"ContainerStarted","Data":"e34a461946c1de0932cbb6018edc09efbec2a0b5cac4bde100f4353bb97adab5"} Oct 08 07:47:39 crc kubenswrapper[4693]: I1008 07:47:39.959127 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" podStartSLOduration=2.556044429 podStartE2EDuration="2.959109123s" podCreationTimestamp="2025-10-08 07:47:37 +0000 UTC" firstStartedPulling="2025-10-08 07:47:38.918125064 +0000 UTC m=+1844.289089999" lastFinishedPulling="2025-10-08 07:47:39.321189758 +0000 UTC m=+1844.692154693" observedRunningTime="2025-10-08 07:47:39.956162117 +0000 UTC m=+1845.327127092" watchObservedRunningTime="2025-10-08 07:47:39.959109123 +0000 UTC m=+1845.330074058" Oct 08 07:47:48 crc kubenswrapper[4693]: I1008 07:47:48.022745 4693 generic.go:334] "Generic (PLEG): container finished" podID="ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399" containerID="c57928666e1eecc5c003252696194f69310c086946ee9497b3aba570fcb1db07" exitCode=0 Oct 08 07:47:48 crc kubenswrapper[4693]: I1008 07:47:48.023490 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" event={"ID":"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399","Type":"ContainerDied","Data":"c57928666e1eecc5c003252696194f69310c086946ee9497b3aba570fcb1db07"} Oct 08 07:47:49 crc kubenswrapper[4693]: I1008 07:47:49.628061 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:49 crc kubenswrapper[4693]: I1008 07:47:49.745467 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlsbt\" (UniqueName: \"kubernetes.io/projected/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-kube-api-access-dlsbt\") pod \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\" (UID: \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\") " Oct 08 07:47:49 crc kubenswrapper[4693]: I1008 07:47:49.745697 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-inventory-0\") pod \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\" (UID: \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\") " Oct 08 07:47:49 crc kubenswrapper[4693]: I1008 07:47:49.745724 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-ssh-key-openstack-edpm-ipam\") pod \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\" (UID: \"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399\") " Oct 08 07:47:49 crc kubenswrapper[4693]: I1008 07:47:49.760226 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-kube-api-access-dlsbt" (OuterVolumeSpecName: "kube-api-access-dlsbt") pod "ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399" (UID: "ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399"). InnerVolumeSpecName "kube-api-access-dlsbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:47:49 crc kubenswrapper[4693]: I1008 07:47:49.779886 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399" (UID: "ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:47:49 crc kubenswrapper[4693]: I1008 07:47:49.795060 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399" (UID: "ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:47:49 crc kubenswrapper[4693]: I1008 07:47:49.847924 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 08 07:47:49 crc kubenswrapper[4693]: I1008 07:47:49.847984 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlsbt\" (UniqueName: \"kubernetes.io/projected/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-kube-api-access-dlsbt\") on node \"crc\" DevicePath \"\"" Oct 08 07:47:49 crc kubenswrapper[4693]: I1008 07:47:49.847996 4693 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.046527 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" event={"ID":"ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399","Type":"ContainerDied","Data":"e34a461946c1de0932cbb6018edc09efbec2a0b5cac4bde100f4353bb97adab5"} Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.046570 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-dj5jp" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.046571 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e34a461946c1de0932cbb6018edc09efbec2a0b5cac4bde100f4353bb97adab5" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.131449 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk"] Oct 08 07:47:50 crc kubenswrapper[4693]: E1008 07:47:50.132161 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399" containerName="ssh-known-hosts-edpm-deployment" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.132176 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399" containerName="ssh-known-hosts-edpm-deployment" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.132431 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399" containerName="ssh-known-hosts-edpm-deployment" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.133175 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.137259 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.137493 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.137648 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.137866 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.143564 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk"] Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.257935 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c2gpk\" (UID: \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.258238 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c2gpk\" (UID: \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.258375 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9722m\" (UniqueName: \"kubernetes.io/projected/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-kube-api-access-9722m\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c2gpk\" (UID: \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.359979 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c2gpk\" (UID: \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.360281 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c2gpk\" (UID: \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.360455 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9722m\" (UniqueName: \"kubernetes.io/projected/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-kube-api-access-9722m\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c2gpk\" (UID: \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.365267 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c2gpk\" (UID: \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.365521 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c2gpk\" (UID: \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.392130 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9722m\" (UniqueName: \"kubernetes.io/projected/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-kube-api-access-9722m\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c2gpk\" (UID: \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:47:50 crc kubenswrapper[4693]: I1008 07:47:50.455636 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:47:51 crc kubenswrapper[4693]: I1008 07:47:51.080496 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk"] Oct 08 07:47:52 crc kubenswrapper[4693]: I1008 07:47:52.073389 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" event={"ID":"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4","Type":"ContainerStarted","Data":"4717c384d8ea9e5e654aa677a9b6416dfe8a6b012070d738836366d4e3960c40"} Oct 08 07:47:52 crc kubenswrapper[4693]: I1008 07:47:52.073857 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" event={"ID":"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4","Type":"ContainerStarted","Data":"cb9b9ee6c841802586e42aee6987554248acf7e7ea7d9296c4b5fe745a222a40"} Oct 08 07:47:52 crc kubenswrapper[4693]: I1008 07:47:52.096126 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" podStartSLOduration=1.5218738969999999 podStartE2EDuration="2.09609697s" podCreationTimestamp="2025-10-08 07:47:50 +0000 UTC" firstStartedPulling="2025-10-08 07:47:51.08518694 +0000 UTC m=+1856.456151915" lastFinishedPulling="2025-10-08 07:47:51.659410053 +0000 UTC m=+1857.030374988" observedRunningTime="2025-10-08 07:47:52.091327966 +0000 UTC m=+1857.462292941" watchObservedRunningTime="2025-10-08 07:47:52.09609697 +0000 UTC m=+1857.467061905" Oct 08 07:48:01 crc kubenswrapper[4693]: I1008 07:48:01.188355 4693 generic.go:334] "Generic (PLEG): container finished" podID="4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4" containerID="4717c384d8ea9e5e654aa677a9b6416dfe8a6b012070d738836366d4e3960c40" exitCode=0 Oct 08 07:48:01 crc kubenswrapper[4693]: I1008 07:48:01.188508 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" 
event={"ID":"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4","Type":"ContainerDied","Data":"4717c384d8ea9e5e654aa677a9b6416dfe8a6b012070d738836366d4e3960c40"} Oct 08 07:48:02 crc kubenswrapper[4693]: I1008 07:48:02.615773 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:48:02 crc kubenswrapper[4693]: I1008 07:48:02.734057 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-inventory\") pod \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\" (UID: \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\") " Oct 08 07:48:02 crc kubenswrapper[4693]: I1008 07:48:02.734227 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9722m\" (UniqueName: \"kubernetes.io/projected/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-kube-api-access-9722m\") pod \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\" (UID: \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\") " Oct 08 07:48:02 crc kubenswrapper[4693]: I1008 07:48:02.734429 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-ssh-key\") pod \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\" (UID: \"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4\") " Oct 08 07:48:02 crc kubenswrapper[4693]: I1008 07:48:02.739526 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-kube-api-access-9722m" (OuterVolumeSpecName: "kube-api-access-9722m") pod "4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4" (UID: "4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4"). InnerVolumeSpecName "kube-api-access-9722m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:48:02 crc kubenswrapper[4693]: I1008 07:48:02.762204 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4" (UID: "4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:48:02 crc kubenswrapper[4693]: I1008 07:48:02.762708 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-inventory" (OuterVolumeSpecName: "inventory") pod "4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4" (UID: "4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:48:02 crc kubenswrapper[4693]: I1008 07:48:02.836205 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9722m\" (UniqueName: \"kubernetes.io/projected/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-kube-api-access-9722m\") on node \"crc\" DevicePath \"\"" Oct 08 07:48:02 crc kubenswrapper[4693]: I1008 07:48:02.836231 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:48:02 crc kubenswrapper[4693]: I1008 07:48:02.836241 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.212583 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" event={"ID":"4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4","Type":"ContainerDied","Data":"cb9b9ee6c841802586e42aee6987554248acf7e7ea7d9296c4b5fe745a222a40"} Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.212639 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb9b9ee6c841802586e42aee6987554248acf7e7ea7d9296c4b5fe745a222a40" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.212669 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c2gpk" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.306015 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg"] Oct 08 07:48:03 crc kubenswrapper[4693]: E1008 07:48:03.306656 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.306754 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.307054 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.308691 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.318979 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.318989 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.321899 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.326368 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg"] Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.327577 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.446962 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg\" (UID: \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.447719 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk4n9\" (UniqueName: \"kubernetes.io/projected/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-kube-api-access-pk4n9\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg\" (UID: \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.447781 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg\" (UID: \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.549593 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg\" (UID: \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.549899 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk4n9\" (UniqueName: \"kubernetes.io/projected/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-kube-api-access-pk4n9\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg\" (UID: \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.549942 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg\" (UID: 
\"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.554692 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg\" (UID: \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.560562 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg\" (UID: \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.573938 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk4n9\" (UniqueName: \"kubernetes.io/projected/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-kube-api-access-pk4n9\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg\" (UID: \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:03 crc kubenswrapper[4693]: I1008 07:48:03.646850 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:04 crc kubenswrapper[4693]: I1008 07:48:04.274168 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg"] Oct 08 07:48:05 crc kubenswrapper[4693]: I1008 07:48:05.247124 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" event={"ID":"cd25f9bb-a470-4aa4-8afa-6b484fa192c1","Type":"ContainerStarted","Data":"36c5b354da7a42d05eede0c48696af3b9067108ed2c4f4f28dea986682695f16"} Oct 08 07:48:05 crc kubenswrapper[4693]: I1008 07:48:05.247606 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" event={"ID":"cd25f9bb-a470-4aa4-8afa-6b484fa192c1","Type":"ContainerStarted","Data":"f25fbfbe9734646487c5f360b7f49536e8952ccc7946e3277359b6321d5869a9"} Oct 08 07:48:05 crc kubenswrapper[4693]: I1008 07:48:05.276438 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" podStartSLOduration=1.782565146 podStartE2EDuration="2.276419675s" podCreationTimestamp="2025-10-08 07:48:03 +0000 UTC" firstStartedPulling="2025-10-08 07:48:04.27688678 +0000 UTC m=+1869.647851735" lastFinishedPulling="2025-10-08 07:48:04.770741329 +0000 UTC m=+1870.141706264" observedRunningTime="2025-10-08 07:48:05.268750306 +0000 UTC m=+1870.639715271" watchObservedRunningTime="2025-10-08 07:48:05.276419675 +0000 UTC m=+1870.647384610" Oct 08 07:48:15 crc kubenswrapper[4693]: I1008 07:48:15.358184 4693 generic.go:334] "Generic (PLEG): container finished" podID="cd25f9bb-a470-4aa4-8afa-6b484fa192c1" containerID="36c5b354da7a42d05eede0c48696af3b9067108ed2c4f4f28dea986682695f16" exitCode=0 Oct 08 07:48:15 crc kubenswrapper[4693]: I1008 07:48:15.358249 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" 
event={"ID":"cd25f9bb-a470-4aa4-8afa-6b484fa192c1","Type":"ContainerDied","Data":"36c5b354da7a42d05eede0c48696af3b9067108ed2c4f4f28dea986682695f16"} Oct 08 07:48:16 crc kubenswrapper[4693]: I1008 07:48:16.832785 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:16 crc kubenswrapper[4693]: I1008 07:48:16.921179 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-inventory\") pod \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\" (UID: \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\") " Oct 08 07:48:16 crc kubenswrapper[4693]: I1008 07:48:16.921609 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-ssh-key\") pod \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\" (UID: \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\") " Oct 08 07:48:16 crc kubenswrapper[4693]: I1008 07:48:16.921738 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pk4n9\" (UniqueName: \"kubernetes.io/projected/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-kube-api-access-pk4n9\") pod \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\" (UID: \"cd25f9bb-a470-4aa4-8afa-6b484fa192c1\") " Oct 08 07:48:16 crc kubenswrapper[4693]: I1008 07:48:16.927302 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-kube-api-access-pk4n9" (OuterVolumeSpecName: "kube-api-access-pk4n9") pod "cd25f9bb-a470-4aa4-8afa-6b484fa192c1" (UID: "cd25f9bb-a470-4aa4-8afa-6b484fa192c1"). InnerVolumeSpecName "kube-api-access-pk4n9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:48:16 crc kubenswrapper[4693]: I1008 07:48:16.946689 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-inventory" (OuterVolumeSpecName: "inventory") pod "cd25f9bb-a470-4aa4-8afa-6b484fa192c1" (UID: "cd25f9bb-a470-4aa4-8afa-6b484fa192c1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:48:16 crc kubenswrapper[4693]: I1008 07:48:16.951337 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cd25f9bb-a470-4aa4-8afa-6b484fa192c1" (UID: "cd25f9bb-a470-4aa4-8afa-6b484fa192c1"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.024274 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.024309 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.024323 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pk4n9\" (UniqueName: \"kubernetes.io/projected/cd25f9bb-a470-4aa4-8afa-6b484fa192c1-kube-api-access-pk4n9\") on node \"crc\" DevicePath \"\"" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.379908 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" event={"ID":"cd25f9bb-a470-4aa4-8afa-6b484fa192c1","Type":"ContainerDied","Data":"f25fbfbe9734646487c5f360b7f49536e8952ccc7946e3277359b6321d5869a9"} Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.380148 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f25fbfbe9734646487c5f360b7f49536e8952ccc7946e3277359b6321d5869a9" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.380032 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.508297 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp"] Oct 08 07:48:17 crc kubenswrapper[4693]: E1008 07:48:17.508728 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd25f9bb-a470-4aa4-8afa-6b484fa192c1" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.508752 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd25f9bb-a470-4aa4-8afa-6b484fa192c1" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.509052 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd25f9bb-a470-4aa4-8afa-6b484fa192c1" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.509885 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.514895 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.515240 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.515653 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.515767 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.516136 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.516123 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.516828 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.517919 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.525170 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp"] Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.652054 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.652160 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.652305 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.652400 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-libvirt-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.652540 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.652620 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.652720 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.652791 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.652911 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r2vg\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-kube-api-access-8r2vg\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.653070 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.653153 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.653218 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.653273 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.653349 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.755864 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.756295 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.756516 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.756579 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.756655 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.756692 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.756741 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r2vg\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-kube-api-access-8r2vg\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.756877 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.756924 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.756961 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.756994 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.757208 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-repo-setup-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.757250 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.757284 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.762386 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.762491 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.762916 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.763794 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.764148 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.764396 4693 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.764795 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.764853 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.765063 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.765556 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.768141 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.768292 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.770583 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.787216 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r2vg\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-kube-api-access-8r2vg\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:17 crc kubenswrapper[4693]: I1008 07:48:17.839916 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:48:18 crc kubenswrapper[4693]: I1008 07:48:18.404061 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp"] Oct 08 07:48:18 crc kubenswrapper[4693]: W1008 07:48:18.411641 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36d7c0e2_4414_4f5d_ace2_37e627b6e330.slice/crio-a2a25f271281a74c69cd811dc0a279afa3f91ae86696a4477834d4d4abaaeb90 WatchSource:0}: Error finding container a2a25f271281a74c69cd811dc0a279afa3f91ae86696a4477834d4d4abaaeb90: Status 404 returned error can't find the container with id a2a25f271281a74c69cd811dc0a279afa3f91ae86696a4477834d4d4abaaeb90 Oct 08 07:48:18 crc kubenswrapper[4693]: I1008 07:48:18.413978 4693 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 08 07:48:19 crc kubenswrapper[4693]: I1008 07:48:19.410228 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" event={"ID":"36d7c0e2-4414-4f5d-ace2-37e627b6e330","Type":"ContainerStarted","Data":"bb0132b856d5f960f9a9b6b208b74c5fbdecf8bc21f130b35f964275b9c8f728"} Oct 08 07:48:19 crc kubenswrapper[4693]: I1008 07:48:19.411019 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" event={"ID":"36d7c0e2-4414-4f5d-ace2-37e627b6e330","Type":"ContainerStarted","Data":"a2a25f271281a74c69cd811dc0a279afa3f91ae86696a4477834d4d4abaaeb90"} Oct 08 07:48:19 crc kubenswrapper[4693]: I1008 07:48:19.447577 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" podStartSLOduration=2.027850554 podStartE2EDuration="2.447552119s" podCreationTimestamp="2025-10-08 07:48:17 +0000 UTC" firstStartedPulling="2025-10-08 07:48:18.413727055 +0000 UTC m=+1883.784691990" lastFinishedPulling="2025-10-08 07:48:18.83342862 +0000 UTC m=+1884.204393555" observedRunningTime="2025-10-08 07:48:19.440323591 +0000 UTC m=+1884.811288566" watchObservedRunningTime="2025-10-08 07:48:19.447552119 +0000 UTC m=+1884.818517094" Oct 08 07:48:53 crc kubenswrapper[4693]: I1008 07:48:53.490431 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:48:53 crc kubenswrapper[4693]: I1008 07:48:53.490938 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:49:02 crc kubenswrapper[4693]: I1008 07:49:02.879644 4693 generic.go:334] "Generic (PLEG): container finished" podID="36d7c0e2-4414-4f5d-ace2-37e627b6e330" containerID="bb0132b856d5f960f9a9b6b208b74c5fbdecf8bc21f130b35f964275b9c8f728" exitCode=0 Oct 08 07:49:02 crc kubenswrapper[4693]: I1008 07:49:02.879855 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" event={"ID":"36d7c0e2-4414-4f5d-ace2-37e627b6e330","Type":"ContainerDied","Data":"bb0132b856d5f960f9a9b6b208b74c5fbdecf8bc21f130b35f964275b9c8f728"} Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.438057 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.515223 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-repo-setup-combined-ca-bundle\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.515351 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-ovn-default-certs-0\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.515394 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.515455 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-neutron-metadata-combined-ca-bundle\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.515504 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8r2vg\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-kube-api-access-8r2vg\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.515569 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.515608 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-bootstrap-combined-ca-bundle\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.515691 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.516441 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-nova-combined-ca-bundle\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.516497 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-libvirt-combined-ca-bundle\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.516600 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-telemetry-combined-ca-bundle\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.516711 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-ssh-key\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.516773 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-ovn-combined-ca-bundle\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.516803 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-inventory\") pod \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\" (UID: \"36d7c0e2-4414-4f5d-ace2-37e627b6e330\") " Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.522392 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.522977 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.523199 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.524061 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-kube-api-access-8r2vg" (OuterVolumeSpecName: "kube-api-access-8r2vg") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "kube-api-access-8r2vg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.525327 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.525856 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.526142 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.526906 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.527298 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.527749 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.527922 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.528091 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.555263 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.563950 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-inventory" (OuterVolumeSpecName: "inventory") pod "36d7c0e2-4414-4f5d-ace2-37e627b6e330" (UID: "36d7c0e2-4414-4f5d-ace2-37e627b6e330"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.618805 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619001 4693 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619059 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-inventory\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619115 4693 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619167 4693 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619218 4693 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619287 4693 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619344 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8r2vg\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-kube-api-access-8r2vg\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619395 4693 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619449 4693 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619501 4693 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/36d7c0e2-4414-4f5d-ace2-37e627b6e330-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619557 4693 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619615 4693 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.619669 4693 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d7c0e2-4414-4f5d-ace2-37e627b6e330-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.904644 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp" event={"ID":"36d7c0e2-4414-4f5d-ace2-37e627b6e330","Type":"ContainerDied","Data":"a2a25f271281a74c69cd811dc0a279afa3f91ae86696a4477834d4d4abaaeb90"}
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.904703 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2a25f271281a74c69cd811dc0a279afa3f91ae86696a4477834d4d4abaaeb90"
Oct 08 07:49:04 crc kubenswrapper[4693]: I1008 07:49:04.904794 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.042561 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"]
Oct 08 07:49:05 crc kubenswrapper[4693]: E1008 07:49:05.043543 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36d7c0e2-4414-4f5d-ace2-37e627b6e330" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.043596 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="36d7c0e2-4414-4f5d-ace2-37e627b6e330" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.044037 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="36d7c0e2-4414-4f5d-ace2-37e627b6e330" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.045118 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.049011 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.049060 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.049275 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.049480 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.049689 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.081020 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"]
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.129230 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.129572 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhkwf\" (UniqueName: \"kubernetes.io/projected/2715cea9-fa27-469b-988a-338c5b80f62d-kube-api-access-bhkwf\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.129623 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.129700 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2715cea9-fa27-469b-988a-338c5b80f62d-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.130009 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.232673 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.233183 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.233344 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhkwf\" (UniqueName: \"kubernetes.io/projected/2715cea9-fa27-469b-988a-338c5b80f62d-kube-api-access-bhkwf\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.233414 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.233493 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2715cea9-fa27-469b-988a-338c5b80f62d-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.235096 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2715cea9-fa27-469b-988a-338c5b80f62d-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.238591 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.239660 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.245342 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.266799 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhkwf\" (UniqueName: \"kubernetes.io/projected/2715cea9-fa27-469b-988a-338c5b80f62d-kube-api-access-bhkwf\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ncmdw\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.372608 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:49:05 crc kubenswrapper[4693]: I1008 07:49:05.941649 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"]
Oct 08 07:49:06 crc kubenswrapper[4693]: I1008 07:49:06.926884 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw" event={"ID":"2715cea9-fa27-469b-988a-338c5b80f62d","Type":"ContainerStarted","Data":"a5899d143b07a3c47f80513aecbc480891edd5efde5e3e9174be99719d793d95"}
Oct 08 07:49:06 crc kubenswrapper[4693]: I1008 07:49:06.927309 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw" event={"ID":"2715cea9-fa27-469b-988a-338c5b80f62d","Type":"ContainerStarted","Data":"b71dfce7c0ce0e223f621d723d8a31ae68e648d3bf88fac597b81ca9ad72cacb"}
Oct 08 07:49:06 crc kubenswrapper[4693]: I1008 07:49:06.945175 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw" podStartSLOduration=1.24738085 podStartE2EDuration="1.945159248s" podCreationTimestamp="2025-10-08 07:49:05 +0000 UTC" firstStartedPulling="2025-10-08 07:49:05.953831356 +0000 UTC m=+1931.324796301" lastFinishedPulling="2025-10-08 07:49:06.651609744 +0000 UTC m=+1932.022574699" observedRunningTime="2025-10-08 07:49:06.940156548 +0000 UTC m=+1932.311121513" watchObservedRunningTime="2025-10-08 07:49:06.945159248 +0000 UTC m=+1932.316124183"
Oct 08 07:49:23 crc kubenswrapper[4693]: I1008 07:49:23.489725 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:49:23 crc kubenswrapper[4693]: I1008 07:49:23.491570 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.440760 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8r6gp"]
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.445639 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.468588 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8r6gp"]
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.605613 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-utilities\") pod \"redhat-marketplace-8r6gp\" (UID: \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\") " pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.605688 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqwfh\" (UniqueName: \"kubernetes.io/projected/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-kube-api-access-bqwfh\") pod \"redhat-marketplace-8r6gp\" (UID: \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\") " pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.605848 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-catalog-content\") pod \"redhat-marketplace-8r6gp\" (UID: \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\") " pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.707559 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-utilities\") pod \"redhat-marketplace-8r6gp\" (UID: \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\") " pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.707653 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqwfh\" (UniqueName: \"kubernetes.io/projected/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-kube-api-access-bqwfh\") pod \"redhat-marketplace-8r6gp\" (UID: \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\") " pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.707888 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-catalog-content\") pod \"redhat-marketplace-8r6gp\" (UID: \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\") " pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.708317 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-utilities\") pod \"redhat-marketplace-8r6gp\" (UID: \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\") " pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.708491 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-catalog-content\") pod \"redhat-marketplace-8r6gp\" (UID: \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\") " pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.732568 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqwfh\" (UniqueName: \"kubernetes.io/projected/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-kube-api-access-bqwfh\") pod \"redhat-marketplace-8r6gp\" (UID: \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\") " pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:42 crc kubenswrapper[4693]: I1008 07:49:42.780153 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:43 crc kubenswrapper[4693]: I1008 07:49:43.262435 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8r6gp"]
Oct 08 07:49:43 crc kubenswrapper[4693]: W1008 07:49:43.276602 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80ecf2ce_9fe6_4d4f_a021_890655c8f4b2.slice/crio-0d474d42ed2b00e1adb32f5e0cfca24d957367769df7a56216baf10836f6beaf WatchSource:0}: Error finding container 0d474d42ed2b00e1adb32f5e0cfca24d957367769df7a56216baf10836f6beaf: Status 404 returned error can't find the container with id 0d474d42ed2b00e1adb32f5e0cfca24d957367769df7a56216baf10836f6beaf
Oct 08 07:49:43 crc kubenswrapper[4693]: I1008 07:49:43.303915 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8r6gp" event={"ID":"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2","Type":"ContainerStarted","Data":"0d474d42ed2b00e1adb32f5e0cfca24d957367769df7a56216baf10836f6beaf"}
Oct 08 07:49:44 crc kubenswrapper[4693]: I1008 07:49:44.315344 4693 generic.go:334] "Generic (PLEG): container finished" podID="80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" containerID="50cb223e5bf92be1395500a5eac9fdd3fa5e322e15d9091381983c6da050beab" exitCode=0
Oct 08 07:49:44 crc kubenswrapper[4693]: I1008 07:49:44.315442 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8r6gp" event={"ID":"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2","Type":"ContainerDied","Data":"50cb223e5bf92be1395500a5eac9fdd3fa5e322e15d9091381983c6da050beab"}
Oct 08 07:49:45 crc kubenswrapper[4693]: I1008 07:49:45.327330 4693 generic.go:334] "Generic (PLEG): container finished" podID="80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" containerID="bfa93961fff3a95af06ba19e67609bd5713e5653a2bae4e850fa82b295f31424" exitCode=0
Oct 08 07:49:45 crc kubenswrapper[4693]: I1008 07:49:45.327374 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8r6gp" event={"ID":"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2","Type":"ContainerDied","Data":"bfa93961fff3a95af06ba19e67609bd5713e5653a2bae4e850fa82b295f31424"}
Oct 08 07:49:46 crc kubenswrapper[4693]: I1008 07:49:46.343314 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8r6gp" event={"ID":"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2","Type":"ContainerStarted","Data":"71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66"}
Oct 08 07:49:46 crc kubenswrapper[4693]: I1008 07:49:46.382470 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8r6gp" podStartSLOduration=2.7723108549999997 podStartE2EDuration="4.382439604s" podCreationTimestamp="2025-10-08 07:49:42 +0000 UTC" firstStartedPulling="2025-10-08 07:49:44.317968845 +0000 UTC m=+1969.688933820" lastFinishedPulling="2025-10-08 07:49:45.928097634 +0000 UTC m=+1971.299062569" observedRunningTime="2025-10-08 07:49:46.378748178 +0000 UTC m=+1971.749713133" watchObservedRunningTime="2025-10-08 07:49:46.382439604 +0000 UTC m=+1971.753404579"
Oct 08 07:49:52 crc kubenswrapper[4693]: I1008 07:49:52.781186 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:52 crc kubenswrapper[4693]: I1008 07:49:52.781881 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:52 crc kubenswrapper[4693]: I1008 07:49:52.878888 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:53 crc kubenswrapper[4693]: I1008 07:49:53.474885 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:53 crc kubenswrapper[4693]: I1008 07:49:53.490497 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 07:49:53 crc kubenswrapper[4693]: I1008 07:49:53.490567 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 07:49:53 crc kubenswrapper[4693]: I1008 07:49:53.490618 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr"
Oct 08 07:49:53 crc kubenswrapper[4693]: I1008 07:49:53.491422 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ebfdbc64dc1ffeb4225c54ca810a22cc10ba58b76c91b45301c1195e461b6b73"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 08 07:49:53 crc kubenswrapper[4693]: I1008 07:49:53.491496 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://ebfdbc64dc1ffeb4225c54ca810a22cc10ba58b76c91b45301c1195e461b6b73" gracePeriod=600
Oct 08 07:49:53 crc kubenswrapper[4693]: I1008 07:49:53.523423 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8r6gp"]
Oct 08 07:49:54 crc kubenswrapper[4693]: I1008 07:49:54.430158 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="ebfdbc64dc1ffeb4225c54ca810a22cc10ba58b76c91b45301c1195e461b6b73" exitCode=0
Oct 08 07:49:54 crc kubenswrapper[4693]: I1008 07:49:54.430301 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"ebfdbc64dc1ffeb4225c54ca810a22cc10ba58b76c91b45301c1195e461b6b73"}
Oct 08 07:49:54 crc kubenswrapper[4693]: I1008 07:49:54.431218 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f"}
Oct 08 07:49:54 crc kubenswrapper[4693]: I1008 07:49:54.431234 4693 scope.go:117] "RemoveContainer" containerID="c4b08d058f9300424d11551703e24e7f3d5cd4c5d8a1670ee219c22717498503"
Oct 08 07:49:55 crc kubenswrapper[4693]: I1008 07:49:55.449664 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8r6gp" podUID="80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" containerName="registry-server" containerID="cri-o://71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66" gracePeriod=2
Oct 08 07:49:55 crc kubenswrapper[4693]: I1008 07:49:55.976501 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.102107 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqwfh\" (UniqueName: \"kubernetes.io/projected/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-kube-api-access-bqwfh\") pod \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\" (UID: \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\") "
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.102180 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-utilities\") pod \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\" (UID: \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\") "
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.102387 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-catalog-content\") pod \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\" (UID: \"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2\") "
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.103913 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-utilities" (OuterVolumeSpecName: "utilities") pod "80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" (UID: "80ecf2ce-9fe6-4d4f-a021-890655c8f4b2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.115613 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-kube-api-access-bqwfh" (OuterVolumeSpecName: "kube-api-access-bqwfh") pod "80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" (UID: "80ecf2ce-9fe6-4d4f-a021-890655c8f4b2"). InnerVolumeSpecName "kube-api-access-bqwfh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.117090 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" (UID: "80ecf2ce-9fe6-4d4f-a021-890655c8f4b2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.206061 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.206124 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqwfh\" (UniqueName: \"kubernetes.io/projected/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-kube-api-access-bqwfh\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.206147 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2-utilities\") on node \"crc\" DevicePath \"\""
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.459920 4693 generic.go:334] "Generic (PLEG): container finished" podID="80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" containerID="71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66" exitCode=0
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.459975 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8r6gp" event={"ID":"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2","Type":"ContainerDied","Data":"71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66"}
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.460007 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8r6gp" event={"ID":"80ecf2ce-9fe6-4d4f-a021-890655c8f4b2","Type":"ContainerDied","Data":"0d474d42ed2b00e1adb32f5e0cfca24d957367769df7a56216baf10836f6beaf"}
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.460030 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8r6gp"
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.460036 4693 scope.go:117] "RemoveContainer" containerID="71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66"
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.509401 4693 scope.go:117] "RemoveContainer" containerID="bfa93961fff3a95af06ba19e67609bd5713e5653a2bae4e850fa82b295f31424"
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.526924 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8r6gp"]
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.538215 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8r6gp"]
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.544082 4693 scope.go:117] "RemoveContainer" containerID="50cb223e5bf92be1395500a5eac9fdd3fa5e322e15d9091381983c6da050beab"
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.622057 4693 scope.go:117] "RemoveContainer" containerID="71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66"
Oct 08 07:49:56 crc kubenswrapper[4693]: E1008 07:49:56.622971 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66\": container with ID starting with 71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66 not found: ID does not exist" containerID="71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66"
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.623024 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66"} err="failed to get container status \"71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66\": rpc error: code = NotFound desc = could not find container \"71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66\": container with ID starting with 71322ea0685fe836965c81376b714570bee2ca86e6e109dcb188c7530180cf66 not found: ID does not exist"
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.623061 4693 scope.go:117] "RemoveContainer" containerID="bfa93961fff3a95af06ba19e67609bd5713e5653a2bae4e850fa82b295f31424"
Oct 08 07:49:56 crc kubenswrapper[4693]: E1008 07:49:56.623584 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfa93961fff3a95af06ba19e67609bd5713e5653a2bae4e850fa82b295f31424\": container with ID starting with bfa93961fff3a95af06ba19e67609bd5713e5653a2bae4e850fa82b295f31424 not found: ID does not exist" containerID="bfa93961fff3a95af06ba19e67609bd5713e5653a2bae4e850fa82b295f31424"
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.623622 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfa93961fff3a95af06ba19e67609bd5713e5653a2bae4e850fa82b295f31424"} err="failed to get container status \"bfa93961fff3a95af06ba19e67609bd5713e5653a2bae4e850fa82b295f31424\": rpc error: code = NotFound desc = could not find container \"bfa93961fff3a95af06ba19e67609bd5713e5653a2bae4e850fa82b295f31424\": container with ID starting with bfa93961fff3a95af06ba19e67609bd5713e5653a2bae4e850fa82b295f31424 not found: ID does not exist"
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.623647 4693 scope.go:117] "RemoveContainer" containerID="50cb223e5bf92be1395500a5eac9fdd3fa5e322e15d9091381983c6da050beab"
Oct 08 07:49:56 crc kubenswrapper[4693]: E1008 07:49:56.624169 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50cb223e5bf92be1395500a5eac9fdd3fa5e322e15d9091381983c6da050beab\": container with ID starting with 50cb223e5bf92be1395500a5eac9fdd3fa5e322e15d9091381983c6da050beab not found: ID does not exist" containerID="50cb223e5bf92be1395500a5eac9fdd3fa5e322e15d9091381983c6da050beab"
Oct 08 07:49:56 crc kubenswrapper[4693]: I1008 07:49:56.624196 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50cb223e5bf92be1395500a5eac9fdd3fa5e322e15d9091381983c6da050beab"} err="failed to get container status \"50cb223e5bf92be1395500a5eac9fdd3fa5e322e15d9091381983c6da050beab\": rpc error: code = NotFound desc = could not find container \"50cb223e5bf92be1395500a5eac9fdd3fa5e322e15d9091381983c6da050beab\": container with ID starting with 50cb223e5bf92be1395500a5eac9fdd3fa5e322e15d9091381983c6da050beab not found: ID does not exist"
Oct 08 07:49:57 crc kubenswrapper[4693]: I1008 07:49:57.374899 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" path="/var/lib/kubelet/pods/80ecf2ce-9fe6-4d4f-a021-890655c8f4b2/volumes"
Oct 08 07:50:18 crc kubenswrapper[4693]: I1008 07:50:18.691134 4693 generic.go:334] "Generic (PLEG): container finished" podID="2715cea9-fa27-469b-988a-338c5b80f62d" containerID="a5899d143b07a3c47f80513aecbc480891edd5efde5e3e9174be99719d793d95" exitCode=0
Oct 08 07:50:18 crc kubenswrapper[4693]: I1008 07:50:18.691314 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw" event={"ID":"2715cea9-fa27-469b-988a-338c5b80f62d","Type":"ContainerDied","Data":"a5899d143b07a3c47f80513aecbc480891edd5efde5e3e9174be99719d793d95"}
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.178233 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.352911 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-ovn-combined-ca-bundle\") pod \"2715cea9-fa27-469b-988a-338c5b80f62d\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") "
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.353347 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhkwf\" (UniqueName: \"kubernetes.io/projected/2715cea9-fa27-469b-988a-338c5b80f62d-kube-api-access-bhkwf\") pod \"2715cea9-fa27-469b-988a-338c5b80f62d\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") "
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.353413 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-inventory\") pod \"2715cea9-fa27-469b-988a-338c5b80f62d\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") "
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.353442 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-ssh-key\") pod \"2715cea9-fa27-469b-988a-338c5b80f62d\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") "
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.353564 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2715cea9-fa27-469b-988a-338c5b80f62d-ovncontroller-config-0\") pod \"2715cea9-fa27-469b-988a-338c5b80f62d\" (UID: \"2715cea9-fa27-469b-988a-338c5b80f62d\") "
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.361773 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2715cea9-fa27-469b-988a-338c5b80f62d-kube-api-access-bhkwf" (OuterVolumeSpecName: "kube-api-access-bhkwf") pod "2715cea9-fa27-469b-988a-338c5b80f62d" (UID: "2715cea9-fa27-469b-988a-338c5b80f62d"). InnerVolumeSpecName "kube-api-access-bhkwf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.367077 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "2715cea9-fa27-469b-988a-338c5b80f62d" (UID: "2715cea9-fa27-469b-988a-338c5b80f62d"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.381802 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2715cea9-fa27-469b-988a-338c5b80f62d-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "2715cea9-fa27-469b-988a-338c5b80f62d" (UID: "2715cea9-fa27-469b-988a-338c5b80f62d"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.383309 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2715cea9-fa27-469b-988a-338c5b80f62d" (UID: "2715cea9-fa27-469b-988a-338c5b80f62d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.398424 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-inventory" (OuterVolumeSpecName: "inventory") pod "2715cea9-fa27-469b-988a-338c5b80f62d" (UID: "2715cea9-fa27-469b-988a-338c5b80f62d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.456613 4693 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.456663 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhkwf\" (UniqueName: \"kubernetes.io/projected/2715cea9-fa27-469b-988a-338c5b80f62d-kube-api-access-bhkwf\") on node \"crc\" DevicePath \"\""
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.456680 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-inventory\") on node \"crc\" DevicePath \"\""
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.456694 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2715cea9-fa27-469b-988a-338c5b80f62d-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.456710 4693 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2715cea9-fa27-469b-988a-338c5b80f62d-ovncontroller-config-0\") on node \"crc\" DevicePath \"\""
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.719597 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw" event={"ID":"2715cea9-fa27-469b-988a-338c5b80f62d","Type":"ContainerDied","Data":"b71dfce7c0ce0e223f621d723d8a31ae68e648d3bf88fac597b81ca9ad72cacb"}
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.719662 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b71dfce7c0ce0e223f621d723d8a31ae68e648d3bf88fac597b81ca9ad72cacb"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.719697 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ncmdw"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.837646 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"]
Oct 08 07:50:20 crc kubenswrapper[4693]: E1008 07:50:20.838004 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" containerName="registry-server"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.838020 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" containerName="registry-server"
Oct 08 07:50:20 crc kubenswrapper[4693]: E1008 07:50:20.838036 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" containerName="extract-utilities"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.838043 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" containerName="extract-utilities"
Oct 08 07:50:20 crc kubenswrapper[4693]: E1008 07:50:20.838054 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2715cea9-fa27-469b-988a-338c5b80f62d" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.838061 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="2715cea9-fa27-469b-988a-338c5b80f62d" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 08 07:50:20 crc kubenswrapper[4693]: E1008 07:50:20.838090 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" containerName="extract-content"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.838097 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" containerName="extract-content"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.838282 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="80ecf2ce-9fe6-4d4f-a021-890655c8f4b2" containerName="registry-server"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.838298 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="2715cea9-fa27-469b-988a-338c5b80f62d" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.838898 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.841784 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.842409 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.842755 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.843118 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.843510 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.847279 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.861990 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"]
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.966771 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.966951 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.967037 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.967361 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lbtr\" (UniqueName: \"kubernetes.io/projected/0d3eae26-e892-4687-bd4c-4cbd1a566e56-kube-api-access-6lbtr\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.967613 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:20 crc kubenswrapper[4693]: I1008 07:50:20.967665 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.069784 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lbtr\" (UniqueName: \"kubernetes.io/projected/0d3eae26-e892-4687-bd4c-4cbd1a566e56-kube-api-access-6lbtr\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.069871 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.069892 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.069950 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.069988 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.070025 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.074068 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.074409 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.074743 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.075952 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.085003 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.088448 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lbtr\" (UniqueName: \"kubernetes.io/projected/0d3eae26-e892-4687-bd4c-4cbd1a566e56-kube-api-access-6lbtr\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.161108 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.711084 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc"]
Oct 08 07:50:21 crc kubenswrapper[4693]: W1008 07:50:21.719377 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d3eae26_e892_4687_bd4c_4cbd1a566e56.slice/crio-a193713794f9fe6a0048f741c9402da37b648e26fe5be62e714356d05f5c269b WatchSource:0}: Error finding container a193713794f9fe6a0048f741c9402da37b648e26fe5be62e714356d05f5c269b: Status 404 returned error can't find the container with id a193713794f9fe6a0048f741c9402da37b648e26fe5be62e714356d05f5c269b
Oct 08 07:50:21 crc kubenswrapper[4693]: I1008 07:50:21.746953 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc" event={"ID":"0d3eae26-e892-4687-bd4c-4cbd1a566e56","Type":"ContainerStarted","Data":"a193713794f9fe6a0048f741c9402da37b648e26fe5be62e714356d05f5c269b"}
Oct 08 07:50:22 crc kubenswrapper[4693]: I1008 07:50:22.761023 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc" event={"ID":"0d3eae26-e892-4687-bd4c-4cbd1a566e56","Type":"ContainerStarted","Data":"6583650905ba46e1b191745a52d90ee29cee9fc06f7435b4d1c34e049212ea1b"}
Oct 08 07:50:22 crc kubenswrapper[4693]: I1008 07:50:22.797908 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc" podStartSLOduration=2.330615535 podStartE2EDuration="2.797886171s" podCreationTimestamp="2025-10-08 07:50:20 +0000 UTC" firstStartedPulling="2025-10-08 07:50:21.721594594 +0000 UTC m=+2007.092559529" lastFinishedPulling="2025-10-08 07:50:22.18886523 +0000 UTC m=+2007.559830165" observedRunningTime="2025-10-08 07:50:22.78857565 +0000 UTC m=+2008.159540595" watchObservedRunningTime="2025-10-08 07:50:22.797886171 +0000 UTC m=+2008.168851116"
Oct 08 07:51:07 crc kubenswrapper[4693]: I1008 07:51:07.727872 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hsp8j"]
Oct 08 07:51:07 crc kubenswrapper[4693]: I1008 07:51:07.731983 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hsp8j"
Oct 08 07:51:07 crc kubenswrapper[4693]: I1008 07:51:07.743333 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hsp8j"]
Oct 08 07:51:07 crc kubenswrapper[4693]: I1008 07:51:07.812708 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c584d1d3-0869-4d7b-a6c2-09e1f1119065-catalog-content\") pod \"certified-operators-hsp8j\" (UID: \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\") " pod="openshift-marketplace/certified-operators-hsp8j"
Oct 08 07:51:07 crc kubenswrapper[4693]: I1008 07:51:07.812753 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c584d1d3-0869-4d7b-a6c2-09e1f1119065-utilities\") pod \"certified-operators-hsp8j\" (UID: \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\") " pod="openshift-marketplace/certified-operators-hsp8j"
Oct 08 07:51:07 crc kubenswrapper[4693]: I1008 07:51:07.812944 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q56nt\" (UniqueName: \"kubernetes.io/projected/c584d1d3-0869-4d7b-a6c2-09e1f1119065-kube-api-access-q56nt\") pod \"certified-operators-hsp8j\" (UID: \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\") " pod="openshift-marketplace/certified-operators-hsp8j"
Oct 08 07:51:07 crc kubenswrapper[4693]: I1008 07:51:07.914035 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q56nt\" (UniqueName: \"kubernetes.io/projected/c584d1d3-0869-4d7b-a6c2-09e1f1119065-kube-api-access-q56nt\") pod \"certified-operators-hsp8j\" (UID: \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\") " pod="openshift-marketplace/certified-operators-hsp8j"
Oct 08 07:51:07 crc kubenswrapper[4693]: I1008 07:51:07.914104 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c584d1d3-0869-4d7b-a6c2-09e1f1119065-catalog-content\") pod \"certified-operators-hsp8j\" (UID: \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\") " pod="openshift-marketplace/certified-operators-hsp8j"
Oct 08 07:51:07 crc kubenswrapper[4693]: I1008 07:51:07.914122 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c584d1d3-0869-4d7b-a6c2-09e1f1119065-utilities\") pod \"certified-operators-hsp8j\" (UID: \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\") " pod="openshift-marketplace/certified-operators-hsp8j"
Oct 08 07:51:07 crc kubenswrapper[4693]: I1008 07:51:07.914569 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c584d1d3-0869-4d7b-a6c2-09e1f1119065-utilities\") pod \"certified-operators-hsp8j\" (UID: \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\") " pod="openshift-marketplace/certified-operators-hsp8j"
Oct 08 07:51:07 crc kubenswrapper[4693]: I1008 07:51:07.914756 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c584d1d3-0869-4d7b-a6c2-09e1f1119065-catalog-content\") pod \"certified-operators-hsp8j\" (UID: \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\") " pod="openshift-marketplace/certified-operators-hsp8j"
Oct 08 07:51:07 crc kubenswrapper[4693]: I1008 07:51:07.955127 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q56nt\" (UniqueName: \"kubernetes.io/projected/c584d1d3-0869-4d7b-a6c2-09e1f1119065-kube-api-access-q56nt\") pod \"certified-operators-hsp8j\" (UID: \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\") " pod="openshift-marketplace/certified-operators-hsp8j"
Oct 08 07:51:08 crc kubenswrapper[4693]: I1008 07:51:08.086202 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hsp8j"
Oct 08 07:51:08 crc kubenswrapper[4693]: I1008 07:51:08.635999 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hsp8j"]
Oct 08 07:51:09 crc kubenswrapper[4693]: I1008 07:51:09.315098 4693 generic.go:334] "Generic (PLEG): container finished" podID="c584d1d3-0869-4d7b-a6c2-09e1f1119065" containerID="21f07a5d68ff415af3501ff25ad28c67fce96d46d337fe8b107e0b33fed76dd6" exitCode=0
Oct 08 07:51:09 crc kubenswrapper[4693]: I1008 07:51:09.315208 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsp8j" event={"ID":"c584d1d3-0869-4d7b-a6c2-09e1f1119065","Type":"ContainerDied","Data":"21f07a5d68ff415af3501ff25ad28c67fce96d46d337fe8b107e0b33fed76dd6"}
Oct 08 07:51:09 crc kubenswrapper[4693]: I1008 07:51:09.315475 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsp8j" event={"ID":"c584d1d3-0869-4d7b-a6c2-09e1f1119065","Type":"ContainerStarted","Data":"a4c33d8c5b401309b189dc64731d074dfc1441449ba65858cf08e166c63c3f0f"}
Oct 08 07:51:11 crc kubenswrapper[4693]: I1008 07:51:11.342489 4693 generic.go:334] "Generic (PLEG): container finished" podID="c584d1d3-0869-4d7b-a6c2-09e1f1119065" containerID="5cb206555896c0c8a4c91a935fbd9417c99963fc23e84063180b8ccd5742ebe8" exitCode=0
Oct 08 07:51:11 crc kubenswrapper[4693]: I1008 07:51:11.342553 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsp8j" event={"ID":"c584d1d3-0869-4d7b-a6c2-09e1f1119065","Type":"ContainerDied","Data":"5cb206555896c0c8a4c91a935fbd9417c99963fc23e84063180b8ccd5742ebe8"}
Oct 08 07:51:12 crc kubenswrapper[4693]: I1008 07:51:12.358165 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsp8j" event={"ID":"c584d1d3-0869-4d7b-a6c2-09e1f1119065","Type":"ContainerStarted","Data":"dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f"}
Oct 08 07:51:12 crc kubenswrapper[4693]: I1008 07:51:12.383011 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hsp8j" podStartSLOduration=2.785551817 podStartE2EDuration="5.382985834s" podCreationTimestamp="2025-10-08 07:51:07 +0000 UTC" firstStartedPulling="2025-10-08 07:51:09.31903196 +0000 UTC m=+2054.689996935" lastFinishedPulling="2025-10-08 07:51:11.916466017 +0000 UTC m=+2057.287430952" observedRunningTime="2025-10-08 07:51:12.376573227 +0000 UTC m=+2057.747538182" watchObservedRunningTime="2025-10-08 07:51:12.382985834 +0000 UTC m=+2057.753950769"
Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.097547 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m4nz2"]
Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.099480 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m4nz2"
Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.121169 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m4nz2"]
Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.281765 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d409765-24d9-4dab-b5a1-27dd9eb98a57-utilities\") pod \"redhat-operators-m4nz2\" (UID: \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\") " pod="openshift-marketplace/redhat-operators-m4nz2"
Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.281915 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh4bn\" (UniqueName: \"kubernetes.io/projected/7d409765-24d9-4dab-b5a1-27dd9eb98a57-kube-api-access-zh4bn\") pod \"redhat-operators-m4nz2\" (UID: \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\") " pod="openshift-marketplace/redhat-operators-m4nz2"
Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.282145 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d409765-24d9-4dab-b5a1-27dd9eb98a57-catalog-content\") pod \"redhat-operators-m4nz2\" (UID: \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\") " pod="openshift-marketplace/redhat-operators-m4nz2"
Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.383502 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d409765-24d9-4dab-b5a1-27dd9eb98a57-utilities\") pod \"redhat-operators-m4nz2\" (UID: \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\") " pod="openshift-marketplace/redhat-operators-m4nz2"
Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.383578 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh4bn\" (UniqueName: \"kubernetes.io/projected/7d409765-24d9-4dab-b5a1-27dd9eb98a57-kube-api-access-zh4bn\") pod \"redhat-operators-m4nz2\" (UID: \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\") " pod="openshift-marketplace/redhat-operators-m4nz2"
Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.383633 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d409765-24d9-4dab-b5a1-27dd9eb98a57-catalog-content\") pod \"redhat-operators-m4nz2\" (UID: \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\") " pod="openshift-marketplace/redhat-operators-m4nz2"
Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.384256 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d409765-24d9-4dab-b5a1-27dd9eb98a57-catalog-content\") pod \"redhat-operators-m4nz2\" (UID: \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\") " pod="openshift-marketplace/redhat-operators-m4nz2"
Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.384424 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d409765-24d9-4dab-b5a1-27dd9eb98a57-utilities\") pod \"redhat-operators-m4nz2\" (UID: \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\") " pod="openshift-marketplace/redhat-operators-m4nz2"
Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.410084 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume
\"kube-api-access-zh4bn\" (UniqueName: \"kubernetes.io/projected/7d409765-24d9-4dab-b5a1-27dd9eb98a57-kube-api-access-zh4bn\") pod \"redhat-operators-m4nz2\" (UID: \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\") " pod="openshift-marketplace/redhat-operators-m4nz2" Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.419648 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m4nz2" Oct 08 07:51:14 crc kubenswrapper[4693]: W1008 07:51:14.871147 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d409765_24d9_4dab_b5a1_27dd9eb98a57.slice/crio-a5aed07f6491a369d1aa054c75fcc9de8e4bc21e1c33ede525d4bed605a23df4 WatchSource:0}: Error finding container a5aed07f6491a369d1aa054c75fcc9de8e4bc21e1c33ede525d4bed605a23df4: Status 404 returned error can't find the container with id a5aed07f6491a369d1aa054c75fcc9de8e4bc21e1c33ede525d4bed605a23df4 Oct 08 07:51:14 crc kubenswrapper[4693]: I1008 07:51:14.871438 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m4nz2"] Oct 08 07:51:15 crc kubenswrapper[4693]: I1008 07:51:15.396435 4693 generic.go:334] "Generic (PLEG): container finished" podID="7d409765-24d9-4dab-b5a1-27dd9eb98a57" containerID="2f385996fbe68a0f30a0b629d82b28d0c47fba8124cb90f2f31c759b3d5fdfe0" exitCode=0 Oct 08 07:51:15 crc kubenswrapper[4693]: I1008 07:51:15.396476 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4nz2" event={"ID":"7d409765-24d9-4dab-b5a1-27dd9eb98a57","Type":"ContainerDied","Data":"2f385996fbe68a0f30a0b629d82b28d0c47fba8124cb90f2f31c759b3d5fdfe0"} Oct 08 07:51:15 crc kubenswrapper[4693]: I1008 07:51:15.396500 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4nz2" event={"ID":"7d409765-24d9-4dab-b5a1-27dd9eb98a57","Type":"ContainerStarted","Data":"a5aed07f6491a369d1aa054c75fcc9de8e4bc21e1c33ede525d4bed605a23df4"} Oct 08 07:51:17 crc kubenswrapper[4693]: I1008 07:51:17.428720 4693 generic.go:334] "Generic (PLEG): container finished" podID="0d3eae26-e892-4687-bd4c-4cbd1a566e56" containerID="6583650905ba46e1b191745a52d90ee29cee9fc06f7435b4d1c34e049212ea1b" exitCode=0 Oct 08 07:51:17 crc kubenswrapper[4693]: I1008 07:51:17.428800 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc" event={"ID":"0d3eae26-e892-4687-bd4c-4cbd1a566e56","Type":"ContainerDied","Data":"6583650905ba46e1b191745a52d90ee29cee9fc06f7435b4d1c34e049212ea1b"} Oct 08 07:51:17 crc kubenswrapper[4693]: I1008 07:51:17.434637 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4nz2" event={"ID":"7d409765-24d9-4dab-b5a1-27dd9eb98a57","Type":"ContainerStarted","Data":"0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702"} Oct 08 07:51:18 crc kubenswrapper[4693]: I1008 07:51:18.086496 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hsp8j" Oct 08 07:51:18 crc kubenswrapper[4693]: I1008 07:51:18.086589 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hsp8j" Oct 08 07:51:18 crc kubenswrapper[4693]: I1008 07:51:18.162059 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hsp8j" Oct 08 
07:51:18 crc kubenswrapper[4693]: E1008 07:51:18.327953 4693 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d409765_24d9_4dab_b5a1_27dd9eb98a57.slice/crio-conmon-0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702.scope\": RecentStats: unable to find data in memory cache]" Oct 08 07:51:18 crc kubenswrapper[4693]: I1008 07:51:18.449025 4693 generic.go:334] "Generic (PLEG): container finished" podID="7d409765-24d9-4dab-b5a1-27dd9eb98a57" containerID="0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702" exitCode=0 Oct 08 07:51:18 crc kubenswrapper[4693]: I1008 07:51:18.449133 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4nz2" event={"ID":"7d409765-24d9-4dab-b5a1-27dd9eb98a57","Type":"ContainerDied","Data":"0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702"} Oct 08 07:51:18 crc kubenswrapper[4693]: I1008 07:51:18.519673 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hsp8j" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.058375 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.084689 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-neutron-ovn-metadata-agent-neutron-config-0\") pod \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.084737 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-nova-metadata-neutron-config-0\") pod \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.084790 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-inventory\") pod \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.084870 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-neutron-metadata-combined-ca-bundle\") pod \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.084903 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-ssh-key\") pod \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.084940 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lbtr\" (UniqueName: \"kubernetes.io/projected/0d3eae26-e892-4687-bd4c-4cbd1a566e56-kube-api-access-6lbtr\") pod 
\"0d3eae26-e892-4687-bd4c-4cbd1a566e56\" (UID: \"0d3eae26-e892-4687-bd4c-4cbd1a566e56\") " Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.106037 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d3eae26-e892-4687-bd4c-4cbd1a566e56-kube-api-access-6lbtr" (OuterVolumeSpecName: "kube-api-access-6lbtr") pod "0d3eae26-e892-4687-bd4c-4cbd1a566e56" (UID: "0d3eae26-e892-4687-bd4c-4cbd1a566e56"). InnerVolumeSpecName "kube-api-access-6lbtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.106216 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "0d3eae26-e892-4687-bd4c-4cbd1a566e56" (UID: "0d3eae26-e892-4687-bd4c-4cbd1a566e56"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.124243 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-inventory" (OuterVolumeSpecName: "inventory") pod "0d3eae26-e892-4687-bd4c-4cbd1a566e56" (UID: "0d3eae26-e892-4687-bd4c-4cbd1a566e56"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.138072 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0d3eae26-e892-4687-bd4c-4cbd1a566e56" (UID: "0d3eae26-e892-4687-bd4c-4cbd1a566e56"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.144174 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "0d3eae26-e892-4687-bd4c-4cbd1a566e56" (UID: "0d3eae26-e892-4687-bd4c-4cbd1a566e56"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.154125 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "0d3eae26-e892-4687-bd4c-4cbd1a566e56" (UID: "0d3eae26-e892-4687-bd4c-4cbd1a566e56"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.185957 4693 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.186255 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.186270 4693 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.186285 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.186298 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lbtr\" (UniqueName: \"kubernetes.io/projected/0d3eae26-e892-4687-bd4c-4cbd1a566e56-kube-api-access-6lbtr\") on node \"crc\" DevicePath \"\"" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.186326 4693 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0d3eae26-e892-4687-bd4c-4cbd1a566e56-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.463316 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc" event={"ID":"0d3eae26-e892-4687-bd4c-4cbd1a566e56","Type":"ContainerDied","Data":"a193713794f9fe6a0048f741c9402da37b648e26fe5be62e714356d05f5c269b"} Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.463371 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a193713794f9fe6a0048f741c9402da37b648e26fe5be62e714356d05f5c269b" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.463469 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.468584 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4nz2" event={"ID":"7d409765-24d9-4dab-b5a1-27dd9eb98a57","Type":"ContainerStarted","Data":"2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f"} Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.498527 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m4nz2" podStartSLOduration=2.024535293 podStartE2EDuration="5.498512237s" podCreationTimestamp="2025-10-08 07:51:14 +0000 UTC" firstStartedPulling="2025-10-08 07:51:15.398514611 +0000 UTC m=+2060.769479546" lastFinishedPulling="2025-10-08 07:51:18.872491515 +0000 UTC m=+2064.243456490" observedRunningTime="2025-10-08 07:51:19.497476201 +0000 UTC m=+2064.868441156" watchObservedRunningTime="2025-10-08 07:51:19.498512237 +0000 UTC m=+2064.869477172" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.579149 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk"] Oct 08 07:51:19 crc kubenswrapper[4693]: E1008 07:51:19.579609 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d3eae26-e892-4687-bd4c-4cbd1a566e56" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.579632 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d3eae26-e892-4687-bd4c-4cbd1a566e56" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.579955 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d3eae26-e892-4687-bd4c-4cbd1a566e56" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.580654 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.583299 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.584259 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.591198 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.591252 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.591280 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk"] Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.591286 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.591507 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.591582 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzwsv\" (UniqueName: \"kubernetes.io/projected/014202b7-db23-455e-ba57-d12d3b6e2975-kube-api-access-lzwsv\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.598392 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.598941 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.599141 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.695929 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.696013 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzwsv\" (UniqueName: \"kubernetes.io/projected/014202b7-db23-455e-ba57-d12d3b6e2975-kube-api-access-lzwsv\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.696098 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.696151 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.696184 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.700168 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.701415 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.702432 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.709093 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-ssh-key\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.714863 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzwsv\" (UniqueName: \"kubernetes.io/projected/014202b7-db23-455e-ba57-d12d3b6e2975-kube-api-access-lzwsv\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:19 crc kubenswrapper[4693]: I1008 07:51:19.899239 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:51:20 crc kubenswrapper[4693]: I1008 07:51:20.089953 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hsp8j"] Oct 08 07:51:20 crc kubenswrapper[4693]: I1008 07:51:20.489726 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hsp8j" podUID="c584d1d3-0869-4d7b-a6c2-09e1f1119065" containerName="registry-server" containerID="cri-o://dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f" gracePeriod=2 Oct 08 07:51:20 crc kubenswrapper[4693]: I1008 07:51:20.501396 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk"] Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.065566 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hsp8j" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.122194 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c584d1d3-0869-4d7b-a6c2-09e1f1119065-catalog-content\") pod \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\" (UID: \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\") " Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.122244 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c584d1d3-0869-4d7b-a6c2-09e1f1119065-utilities\") pod \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\" (UID: \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\") " Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.122367 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q56nt\" (UniqueName: \"kubernetes.io/projected/c584d1d3-0869-4d7b-a6c2-09e1f1119065-kube-api-access-q56nt\") pod \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\" (UID: \"c584d1d3-0869-4d7b-a6c2-09e1f1119065\") " Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.123319 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c584d1d3-0869-4d7b-a6c2-09e1f1119065-utilities" (OuterVolumeSpecName: "utilities") pod "c584d1d3-0869-4d7b-a6c2-09e1f1119065" (UID: "c584d1d3-0869-4d7b-a6c2-09e1f1119065"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.126514 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c584d1d3-0869-4d7b-a6c2-09e1f1119065-kube-api-access-q56nt" (OuterVolumeSpecName: "kube-api-access-q56nt") pod "c584d1d3-0869-4d7b-a6c2-09e1f1119065" (UID: "c584d1d3-0869-4d7b-a6c2-09e1f1119065"). InnerVolumeSpecName "kube-api-access-q56nt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.197275 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c584d1d3-0869-4d7b-a6c2-09e1f1119065-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c584d1d3-0869-4d7b-a6c2-09e1f1119065" (UID: "c584d1d3-0869-4d7b-a6c2-09e1f1119065"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.224063 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q56nt\" (UniqueName: \"kubernetes.io/projected/c584d1d3-0869-4d7b-a6c2-09e1f1119065-kube-api-access-q56nt\") on node \"crc\" DevicePath \"\"" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.224093 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c584d1d3-0869-4d7b-a6c2-09e1f1119065-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.224102 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c584d1d3-0869-4d7b-a6c2-09e1f1119065-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.498090 4693 generic.go:334] "Generic (PLEG): container finished" podID="c584d1d3-0869-4d7b-a6c2-09e1f1119065" containerID="dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f" exitCode=0 Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.498147 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsp8j" event={"ID":"c584d1d3-0869-4d7b-a6c2-09e1f1119065","Type":"ContainerDied","Data":"dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f"} Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.498192 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hsp8j" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.498508 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsp8j" event={"ID":"c584d1d3-0869-4d7b-a6c2-09e1f1119065","Type":"ContainerDied","Data":"a4c33d8c5b401309b189dc64731d074dfc1441449ba65858cf08e166c63c3f0f"} Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.498537 4693 scope.go:117] "RemoveContainer" containerID="dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.500880 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" event={"ID":"014202b7-db23-455e-ba57-d12d3b6e2975","Type":"ContainerStarted","Data":"2e123601c08e82b2bdcc10d6399df91cbdfb900d7d5439e1d0a4b9bc918f07c7"} Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.500917 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" event={"ID":"014202b7-db23-455e-ba57-d12d3b6e2975","Type":"ContainerStarted","Data":"4e27f5fcd78d26d10b0f97ba8b83e0a05cfafe6115c76745f6d7b590953707b5"} Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.520258 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" podStartSLOduration=2.105288169 podStartE2EDuration="2.520237117s" podCreationTimestamp="2025-10-08 07:51:19 +0000 UTC" firstStartedPulling="2025-10-08 07:51:20.51831037 +0000 UTC m=+2065.889275345" lastFinishedPulling="2025-10-08 07:51:20.933259358 +0000 UTC m=+2066.304224293" observedRunningTime="2025-10-08 07:51:21.516130641 +0000 UTC m=+2066.887095596" watchObservedRunningTime="2025-10-08 07:51:21.520237117 +0000 UTC m=+2066.891202042" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.533593 4693 scope.go:117] "RemoveContainer" containerID="5cb206555896c0c8a4c91a935fbd9417c99963fc23e84063180b8ccd5742ebe8" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.539861 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hsp8j"] Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.556201 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hsp8j"] Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.561746 4693 scope.go:117] "RemoveContainer" containerID="21f07a5d68ff415af3501ff25ad28c67fce96d46d337fe8b107e0b33fed76dd6" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.583707 4693 scope.go:117] "RemoveContainer" containerID="dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f" Oct 08 07:51:21 crc kubenswrapper[4693]: E1008 07:51:21.584173 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f\": container with ID starting with dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f not found: ID does not exist" containerID="dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.584213 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f"} err="failed to get container status 
\"dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f\": rpc error: code = NotFound desc = could not find container \"dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f\": container with ID starting with dfb545c482201cba8f6e373e4c0a6fa2681c7c89e7cf5d07d886000f2d192c9f not found: ID does not exist" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.584238 4693 scope.go:117] "RemoveContainer" containerID="5cb206555896c0c8a4c91a935fbd9417c99963fc23e84063180b8ccd5742ebe8" Oct 08 07:51:21 crc kubenswrapper[4693]: E1008 07:51:21.584520 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cb206555896c0c8a4c91a935fbd9417c99963fc23e84063180b8ccd5742ebe8\": container with ID starting with 5cb206555896c0c8a4c91a935fbd9417c99963fc23e84063180b8ccd5742ebe8 not found: ID does not exist" containerID="5cb206555896c0c8a4c91a935fbd9417c99963fc23e84063180b8ccd5742ebe8" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.584543 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cb206555896c0c8a4c91a935fbd9417c99963fc23e84063180b8ccd5742ebe8"} err="failed to get container status \"5cb206555896c0c8a4c91a935fbd9417c99963fc23e84063180b8ccd5742ebe8\": rpc error: code = NotFound desc = could not find container \"5cb206555896c0c8a4c91a935fbd9417c99963fc23e84063180b8ccd5742ebe8\": container with ID starting with 5cb206555896c0c8a4c91a935fbd9417c99963fc23e84063180b8ccd5742ebe8 not found: ID does not exist" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.584556 4693 scope.go:117] "RemoveContainer" containerID="21f07a5d68ff415af3501ff25ad28c67fce96d46d337fe8b107e0b33fed76dd6" Oct 08 07:51:21 crc kubenswrapper[4693]: E1008 07:51:21.584761 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21f07a5d68ff415af3501ff25ad28c67fce96d46d337fe8b107e0b33fed76dd6\": container with ID starting with 21f07a5d68ff415af3501ff25ad28c67fce96d46d337fe8b107e0b33fed76dd6 not found: ID does not exist" containerID="21f07a5d68ff415af3501ff25ad28c67fce96d46d337fe8b107e0b33fed76dd6" Oct 08 07:51:21 crc kubenswrapper[4693]: I1008 07:51:21.584779 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21f07a5d68ff415af3501ff25ad28c67fce96d46d337fe8b107e0b33fed76dd6"} err="failed to get container status \"21f07a5d68ff415af3501ff25ad28c67fce96d46d337fe8b107e0b33fed76dd6\": rpc error: code = NotFound desc = could not find container \"21f07a5d68ff415af3501ff25ad28c67fce96d46d337fe8b107e0b33fed76dd6\": container with ID starting with 21f07a5d68ff415af3501ff25ad28c67fce96d46d337fe8b107e0b33fed76dd6 not found: ID does not exist" Oct 08 07:51:23 crc kubenswrapper[4693]: I1008 07:51:23.373420 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c584d1d3-0869-4d7b-a6c2-09e1f1119065" path="/var/lib/kubelet/pods/c584d1d3-0869-4d7b-a6c2-09e1f1119065/volumes" Oct 08 07:51:24 crc kubenswrapper[4693]: I1008 07:51:24.420302 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m4nz2" Oct 08 07:51:24 crc kubenswrapper[4693]: I1008 07:51:24.421013 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m4nz2" Oct 08 07:51:24 crc kubenswrapper[4693]: I1008 07:51:24.483696 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/redhat-operators-m4nz2" Oct 08 07:51:24 crc kubenswrapper[4693]: I1008 07:51:24.614041 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m4nz2" Oct 08 07:51:25 crc kubenswrapper[4693]: I1008 07:51:25.492005 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m4nz2"] Oct 08 07:51:26 crc kubenswrapper[4693]: I1008 07:51:26.553332 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m4nz2" podUID="7d409765-24d9-4dab-b5a1-27dd9eb98a57" containerName="registry-server" containerID="cri-o://2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f" gracePeriod=2 Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.037293 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m4nz2" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.227702 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d409765-24d9-4dab-b5a1-27dd9eb98a57-utilities\") pod \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\" (UID: \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\") " Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.228165 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zh4bn\" (UniqueName: \"kubernetes.io/projected/7d409765-24d9-4dab-b5a1-27dd9eb98a57-kube-api-access-zh4bn\") pod \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\" (UID: \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\") " Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.228197 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d409765-24d9-4dab-b5a1-27dd9eb98a57-catalog-content\") pod \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\" (UID: \"7d409765-24d9-4dab-b5a1-27dd9eb98a57\") " Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.229973 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d409765-24d9-4dab-b5a1-27dd9eb98a57-utilities" (OuterVolumeSpecName: "utilities") pod "7d409765-24d9-4dab-b5a1-27dd9eb98a57" (UID: "7d409765-24d9-4dab-b5a1-27dd9eb98a57"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.241092 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d409765-24d9-4dab-b5a1-27dd9eb98a57-kube-api-access-zh4bn" (OuterVolumeSpecName: "kube-api-access-zh4bn") pod "7d409765-24d9-4dab-b5a1-27dd9eb98a57" (UID: "7d409765-24d9-4dab-b5a1-27dd9eb98a57"). InnerVolumeSpecName "kube-api-access-zh4bn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.311749 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d409765-24d9-4dab-b5a1-27dd9eb98a57-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7d409765-24d9-4dab-b5a1-27dd9eb98a57" (UID: "7d409765-24d9-4dab-b5a1-27dd9eb98a57"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.330509 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zh4bn\" (UniqueName: \"kubernetes.io/projected/7d409765-24d9-4dab-b5a1-27dd9eb98a57-kube-api-access-zh4bn\") on node \"crc\" DevicePath \"\"" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.330539 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d409765-24d9-4dab-b5a1-27dd9eb98a57-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.330551 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d409765-24d9-4dab-b5a1-27dd9eb98a57-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.564382 4693 generic.go:334] "Generic (PLEG): container finished" podID="7d409765-24d9-4dab-b5a1-27dd9eb98a57" containerID="2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f" exitCode=0 Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.564704 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4nz2" event={"ID":"7d409765-24d9-4dab-b5a1-27dd9eb98a57","Type":"ContainerDied","Data":"2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f"} Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.564936 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4nz2" event={"ID":"7d409765-24d9-4dab-b5a1-27dd9eb98a57","Type":"ContainerDied","Data":"a5aed07f6491a369d1aa054c75fcc9de8e4bc21e1c33ede525d4bed605a23df4"} Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.564759 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m4nz2" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.564986 4693 scope.go:117] "RemoveContainer" containerID="2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.601929 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m4nz2"] Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.611719 4693 scope.go:117] "RemoveContainer" containerID="0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.615319 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m4nz2"] Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.637407 4693 scope.go:117] "RemoveContainer" containerID="2f385996fbe68a0f30a0b629d82b28d0c47fba8124cb90f2f31c759b3d5fdfe0" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.698051 4693 scope.go:117] "RemoveContainer" containerID="2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f" Oct 08 07:51:27 crc kubenswrapper[4693]: E1008 07:51:27.698591 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f\": container with ID starting with 2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f not found: ID does not exist" containerID="2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.698638 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f"} err="failed to get container status \"2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f\": rpc error: code = NotFound desc = could not find container \"2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f\": container with ID starting with 2309f2ffdd153766c615a747ffba36becd1e5904ff15c5dbff035791c741894f not found: ID does not exist" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.698668 4693 scope.go:117] "RemoveContainer" containerID="0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702" Oct 08 07:51:27 crc kubenswrapper[4693]: E1008 07:51:27.699431 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702\": container with ID starting with 0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702 not found: ID does not exist" containerID="0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.699504 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702"} err="failed to get container status \"0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702\": rpc error: code = NotFound desc = could not find container \"0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702\": container with ID starting with 0ea868c007f5b1c5d15f47eab2fba7fc7f88445c3e5e9c96548b0ed4ca373702 not found: ID does not exist" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.699561 4693 scope.go:117] "RemoveContainer" 
containerID="2f385996fbe68a0f30a0b629d82b28d0c47fba8124cb90f2f31c759b3d5fdfe0" Oct 08 07:51:27 crc kubenswrapper[4693]: E1008 07:51:27.700010 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f385996fbe68a0f30a0b629d82b28d0c47fba8124cb90f2f31c759b3d5fdfe0\": container with ID starting with 2f385996fbe68a0f30a0b629d82b28d0c47fba8124cb90f2f31c759b3d5fdfe0 not found: ID does not exist" containerID="2f385996fbe68a0f30a0b629d82b28d0c47fba8124cb90f2f31c759b3d5fdfe0" Oct 08 07:51:27 crc kubenswrapper[4693]: I1008 07:51:27.700058 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f385996fbe68a0f30a0b629d82b28d0c47fba8124cb90f2f31c759b3d5fdfe0"} err="failed to get container status \"2f385996fbe68a0f30a0b629d82b28d0c47fba8124cb90f2f31c759b3d5fdfe0\": rpc error: code = NotFound desc = could not find container \"2f385996fbe68a0f30a0b629d82b28d0c47fba8124cb90f2f31c759b3d5fdfe0\": container with ID starting with 2f385996fbe68a0f30a0b629d82b28d0c47fba8124cb90f2f31c759b3d5fdfe0 not found: ID does not exist" Oct 08 07:51:29 crc kubenswrapper[4693]: I1008 07:51:29.381419 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d409765-24d9-4dab-b5a1-27dd9eb98a57" path="/var/lib/kubelet/pods/7d409765-24d9-4dab-b5a1-27dd9eb98a57/volumes" Oct 08 07:51:53 crc kubenswrapper[4693]: I1008 07:51:53.490480 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:51:53 crc kubenswrapper[4693]: I1008 07:51:53.491338 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.564003 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-28rxx"] Oct 08 07:52:18 crc kubenswrapper[4693]: E1008 07:52:18.565374 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c584d1d3-0869-4d7b-a6c2-09e1f1119065" containerName="extract-utilities" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.565393 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="c584d1d3-0869-4d7b-a6c2-09e1f1119065" containerName="extract-utilities" Oct 08 07:52:18 crc kubenswrapper[4693]: E1008 07:52:18.565413 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c584d1d3-0869-4d7b-a6c2-09e1f1119065" containerName="registry-server" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.565422 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="c584d1d3-0869-4d7b-a6c2-09e1f1119065" containerName="registry-server" Oct 08 07:52:18 crc kubenswrapper[4693]: E1008 07:52:18.565458 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d409765-24d9-4dab-b5a1-27dd9eb98a57" containerName="registry-server" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.565467 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d409765-24d9-4dab-b5a1-27dd9eb98a57" containerName="registry-server" Oct 08 07:52:18 crc kubenswrapper[4693]: E1008 
07:52:18.565482 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d409765-24d9-4dab-b5a1-27dd9eb98a57" containerName="extract-content" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.565490 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d409765-24d9-4dab-b5a1-27dd9eb98a57" containerName="extract-content" Oct 08 07:52:18 crc kubenswrapper[4693]: E1008 07:52:18.565501 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d409765-24d9-4dab-b5a1-27dd9eb98a57" containerName="extract-utilities" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.565509 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d409765-24d9-4dab-b5a1-27dd9eb98a57" containerName="extract-utilities" Oct 08 07:52:18 crc kubenswrapper[4693]: E1008 07:52:18.565532 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c584d1d3-0869-4d7b-a6c2-09e1f1119065" containerName="extract-content" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.565541 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="c584d1d3-0869-4d7b-a6c2-09e1f1119065" containerName="extract-content" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.565790 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="c584d1d3-0869-4d7b-a6c2-09e1f1119065" containerName="registry-server" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.565803 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d409765-24d9-4dab-b5a1-27dd9eb98a57" containerName="registry-server" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.569343 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.579150 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-28rxx"] Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.751805 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb84274c-0a78-495b-894f-fc49fd31ec4f-catalog-content\") pod \"community-operators-28rxx\" (UID: \"bb84274c-0a78-495b-894f-fc49fd31ec4f\") " pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.751899 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb84274c-0a78-495b-894f-fc49fd31ec4f-utilities\") pod \"community-operators-28rxx\" (UID: \"bb84274c-0a78-495b-894f-fc49fd31ec4f\") " pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.752548 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zftst\" (UniqueName: \"kubernetes.io/projected/bb84274c-0a78-495b-894f-fc49fd31ec4f-kube-api-access-zftst\") pod \"community-operators-28rxx\" (UID: \"bb84274c-0a78-495b-894f-fc49fd31ec4f\") " pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.854687 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zftst\" (UniqueName: \"kubernetes.io/projected/bb84274c-0a78-495b-894f-fc49fd31ec4f-kube-api-access-zftst\") pod \"community-operators-28rxx\" (UID: \"bb84274c-0a78-495b-894f-fc49fd31ec4f\") " 
pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.855067 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb84274c-0a78-495b-894f-fc49fd31ec4f-catalog-content\") pod \"community-operators-28rxx\" (UID: \"bb84274c-0a78-495b-894f-fc49fd31ec4f\") " pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.855134 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb84274c-0a78-495b-894f-fc49fd31ec4f-utilities\") pod \"community-operators-28rxx\" (UID: \"bb84274c-0a78-495b-894f-fc49fd31ec4f\") " pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.855606 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb84274c-0a78-495b-894f-fc49fd31ec4f-utilities\") pod \"community-operators-28rxx\" (UID: \"bb84274c-0a78-495b-894f-fc49fd31ec4f\") " pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.855640 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb84274c-0a78-495b-894f-fc49fd31ec4f-catalog-content\") pod \"community-operators-28rxx\" (UID: \"bb84274c-0a78-495b-894f-fc49fd31ec4f\") " pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.882834 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zftst\" (UniqueName: \"kubernetes.io/projected/bb84274c-0a78-495b-894f-fc49fd31ec4f-kube-api-access-zftst\") pod \"community-operators-28rxx\" (UID: \"bb84274c-0a78-495b-894f-fc49fd31ec4f\") " pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:18 crc kubenswrapper[4693]: I1008 07:52:18.899200 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:19 crc kubenswrapper[4693]: I1008 07:52:19.448639 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-28rxx"] Oct 08 07:52:20 crc kubenswrapper[4693]: I1008 07:52:20.173415 4693 generic.go:334] "Generic (PLEG): container finished" podID="bb84274c-0a78-495b-894f-fc49fd31ec4f" containerID="b1684cb16af159826947e41d298c6aaec8bd62f02295c0bb7a63b2fba50273a7" exitCode=0 Oct 08 07:52:20 crc kubenswrapper[4693]: I1008 07:52:20.173478 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-28rxx" event={"ID":"bb84274c-0a78-495b-894f-fc49fd31ec4f","Type":"ContainerDied","Data":"b1684cb16af159826947e41d298c6aaec8bd62f02295c0bb7a63b2fba50273a7"} Oct 08 07:52:20 crc kubenswrapper[4693]: I1008 07:52:20.174113 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-28rxx" event={"ID":"bb84274c-0a78-495b-894f-fc49fd31ec4f","Type":"ContainerStarted","Data":"76611e8f5e6de375344060a278773c10bed333d9297754ae34063c289a8d013f"} Oct 08 07:52:21 crc kubenswrapper[4693]: I1008 07:52:21.198074 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-28rxx" event={"ID":"bb84274c-0a78-495b-894f-fc49fd31ec4f","Type":"ContainerStarted","Data":"bafab93a15203421c5125d98c654bc2a8d0b5284a38f3bd86a0446506766081c"} Oct 08 07:52:22 crc kubenswrapper[4693]: I1008 07:52:22.214702 4693 generic.go:334] "Generic (PLEG): container finished" podID="bb84274c-0a78-495b-894f-fc49fd31ec4f" containerID="bafab93a15203421c5125d98c654bc2a8d0b5284a38f3bd86a0446506766081c" exitCode=0 Oct 08 07:52:22 crc kubenswrapper[4693]: I1008 07:52:22.214764 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-28rxx" event={"ID":"bb84274c-0a78-495b-894f-fc49fd31ec4f","Type":"ContainerDied","Data":"bafab93a15203421c5125d98c654bc2a8d0b5284a38f3bd86a0446506766081c"} Oct 08 07:52:23 crc kubenswrapper[4693]: I1008 07:52:23.227105 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-28rxx" event={"ID":"bb84274c-0a78-495b-894f-fc49fd31ec4f","Type":"ContainerStarted","Data":"3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a"} Oct 08 07:52:23 crc kubenswrapper[4693]: I1008 07:52:23.294618 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-28rxx" podStartSLOduration=2.811021435 podStartE2EDuration="5.2945996s" podCreationTimestamp="2025-10-08 07:52:18 +0000 UTC" firstStartedPulling="2025-10-08 07:52:20.178138035 +0000 UTC m=+2125.549102970" lastFinishedPulling="2025-10-08 07:52:22.66171619 +0000 UTC m=+2128.032681135" observedRunningTime="2025-10-08 07:52:23.291768786 +0000 UTC m=+2128.662733721" watchObservedRunningTime="2025-10-08 07:52:23.2945996 +0000 UTC m=+2128.665564535" Oct 08 07:52:23 crc kubenswrapper[4693]: I1008 07:52:23.490198 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:52:23 crc kubenswrapper[4693]: I1008 07:52:23.490496 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" 
podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:52:28 crc kubenswrapper[4693]: I1008 07:52:28.900226 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:28 crc kubenswrapper[4693]: I1008 07:52:28.900583 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:28 crc kubenswrapper[4693]: I1008 07:52:28.955305 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:29 crc kubenswrapper[4693]: I1008 07:52:29.359381 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:29 crc kubenswrapper[4693]: I1008 07:52:29.421682 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-28rxx"] Oct 08 07:52:31 crc kubenswrapper[4693]: I1008 07:52:31.314647 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-28rxx" podUID="bb84274c-0a78-495b-894f-fc49fd31ec4f" containerName="registry-server" containerID="cri-o://3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a" gracePeriod=2 Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.000748 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.003926 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb84274c-0a78-495b-894f-fc49fd31ec4f-catalog-content\") pod \"bb84274c-0a78-495b-894f-fc49fd31ec4f\" (UID: \"bb84274c-0a78-495b-894f-fc49fd31ec4f\") " Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.004002 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb84274c-0a78-495b-894f-fc49fd31ec4f-utilities\") pod \"bb84274c-0a78-495b-894f-fc49fd31ec4f\" (UID: \"bb84274c-0a78-495b-894f-fc49fd31ec4f\") " Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.004036 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zftst\" (UniqueName: \"kubernetes.io/projected/bb84274c-0a78-495b-894f-fc49fd31ec4f-kube-api-access-zftst\") pod \"bb84274c-0a78-495b-894f-fc49fd31ec4f\" (UID: \"bb84274c-0a78-495b-894f-fc49fd31ec4f\") " Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.005371 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb84274c-0a78-495b-894f-fc49fd31ec4f-utilities" (OuterVolumeSpecName: "utilities") pod "bb84274c-0a78-495b-894f-fc49fd31ec4f" (UID: "bb84274c-0a78-495b-894f-fc49fd31ec4f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.014383 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb84274c-0a78-495b-894f-fc49fd31ec4f-kube-api-access-zftst" (OuterVolumeSpecName: "kube-api-access-zftst") pod "bb84274c-0a78-495b-894f-fc49fd31ec4f" (UID: "bb84274c-0a78-495b-894f-fc49fd31ec4f"). InnerVolumeSpecName "kube-api-access-zftst". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.054665 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb84274c-0a78-495b-894f-fc49fd31ec4f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb84274c-0a78-495b-894f-fc49fd31ec4f" (UID: "bb84274c-0a78-495b-894f-fc49fd31ec4f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.106155 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb84274c-0a78-495b-894f-fc49fd31ec4f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.106183 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb84274c-0a78-495b-894f-fc49fd31ec4f-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.106194 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zftst\" (UniqueName: \"kubernetes.io/projected/bb84274c-0a78-495b-894f-fc49fd31ec4f-kube-api-access-zftst\") on node \"crc\" DevicePath \"\"" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.326642 4693 generic.go:334] "Generic (PLEG): container finished" podID="bb84274c-0a78-495b-894f-fc49fd31ec4f" containerID="3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a" exitCode=0 Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.326725 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-28rxx" event={"ID":"bb84274c-0a78-495b-894f-fc49fd31ec4f","Type":"ContainerDied","Data":"3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a"} Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.326727 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-28rxx" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.326846 4693 scope.go:117] "RemoveContainer" containerID="3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.326786 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-28rxx" event={"ID":"bb84274c-0a78-495b-894f-fc49fd31ec4f","Type":"ContainerDied","Data":"76611e8f5e6de375344060a278773c10bed333d9297754ae34063c289a8d013f"} Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.377286 4693 scope.go:117] "RemoveContainer" containerID="bafab93a15203421c5125d98c654bc2a8d0b5284a38f3bd86a0446506766081c" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.388523 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-28rxx"] Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.400511 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-28rxx"] Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.416552 4693 scope.go:117] "RemoveContainer" containerID="b1684cb16af159826947e41d298c6aaec8bd62f02295c0bb7a63b2fba50273a7" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.461322 4693 scope.go:117] "RemoveContainer" containerID="3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a" Oct 08 07:52:32 crc kubenswrapper[4693]: E1008 07:52:32.461893 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a\": container with ID starting with 3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a not found: ID does not exist" containerID="3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.462055 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a"} err="failed to get container status \"3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a\": rpc error: code = NotFound desc = could not find container \"3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a\": container with ID starting with 3f49f60b7a3821d5b907c51b6f842b1b83f8e06ac9a72bbf36d77fc09c165c1a not found: ID does not exist" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.462274 4693 scope.go:117] "RemoveContainer" containerID="bafab93a15203421c5125d98c654bc2a8d0b5284a38f3bd86a0446506766081c" Oct 08 07:52:32 crc kubenswrapper[4693]: E1008 07:52:32.462998 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bafab93a15203421c5125d98c654bc2a8d0b5284a38f3bd86a0446506766081c\": container with ID starting with bafab93a15203421c5125d98c654bc2a8d0b5284a38f3bd86a0446506766081c not found: ID does not exist" containerID="bafab93a15203421c5125d98c654bc2a8d0b5284a38f3bd86a0446506766081c" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.463055 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bafab93a15203421c5125d98c654bc2a8d0b5284a38f3bd86a0446506766081c"} err="failed to get container status \"bafab93a15203421c5125d98c654bc2a8d0b5284a38f3bd86a0446506766081c\": rpc error: code = NotFound desc = could not find 
container \"bafab93a15203421c5125d98c654bc2a8d0b5284a38f3bd86a0446506766081c\": container with ID starting with bafab93a15203421c5125d98c654bc2a8d0b5284a38f3bd86a0446506766081c not found: ID does not exist" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.463091 4693 scope.go:117] "RemoveContainer" containerID="b1684cb16af159826947e41d298c6aaec8bd62f02295c0bb7a63b2fba50273a7" Oct 08 07:52:32 crc kubenswrapper[4693]: E1008 07:52:32.463732 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1684cb16af159826947e41d298c6aaec8bd62f02295c0bb7a63b2fba50273a7\": container with ID starting with b1684cb16af159826947e41d298c6aaec8bd62f02295c0bb7a63b2fba50273a7 not found: ID does not exist" containerID="b1684cb16af159826947e41d298c6aaec8bd62f02295c0bb7a63b2fba50273a7" Oct 08 07:52:32 crc kubenswrapper[4693]: I1008 07:52:32.463772 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1684cb16af159826947e41d298c6aaec8bd62f02295c0bb7a63b2fba50273a7"} err="failed to get container status \"b1684cb16af159826947e41d298c6aaec8bd62f02295c0bb7a63b2fba50273a7\": rpc error: code = NotFound desc = could not find container \"b1684cb16af159826947e41d298c6aaec8bd62f02295c0bb7a63b2fba50273a7\": container with ID starting with b1684cb16af159826947e41d298c6aaec8bd62f02295c0bb7a63b2fba50273a7 not found: ID does not exist" Oct 08 07:52:33 crc kubenswrapper[4693]: I1008 07:52:33.380673 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb84274c-0a78-495b-894f-fc49fd31ec4f" path="/var/lib/kubelet/pods/bb84274c-0a78-495b-894f-fc49fd31ec4f/volumes" Oct 08 07:52:53 crc kubenswrapper[4693]: I1008 07:52:53.490162 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 07:52:53 crc kubenswrapper[4693]: I1008 07:52:53.491252 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 07:52:53 crc kubenswrapper[4693]: I1008 07:52:53.491343 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 07:52:53 crc kubenswrapper[4693]: I1008 07:52:53.492751 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 08 07:52:53 crc kubenswrapper[4693]: I1008 07:52:53.492925 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" gracePeriod=600 Oct 08 07:52:53 crc kubenswrapper[4693]: E1008 07:52:53.621580 4693 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:52:54 crc kubenswrapper[4693]: I1008 07:52:54.617197 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" exitCode=0 Oct 08 07:52:54 crc kubenswrapper[4693]: I1008 07:52:54.617292 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f"} Oct 08 07:52:54 crc kubenswrapper[4693]: I1008 07:52:54.617599 4693 scope.go:117] "RemoveContainer" containerID="ebfdbc64dc1ffeb4225c54ca810a22cc10ba58b76c91b45301c1195e461b6b73" Oct 08 07:52:54 crc kubenswrapper[4693]: I1008 07:52:54.618341 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:52:54 crc kubenswrapper[4693]: E1008 07:52:54.618636 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:53:07 crc kubenswrapper[4693]: I1008 07:53:07.363427 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:53:07 crc kubenswrapper[4693]: E1008 07:53:07.364863 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:53:18 crc kubenswrapper[4693]: I1008 07:53:18.362628 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:53:18 crc kubenswrapper[4693]: E1008 07:53:18.363543 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:53:31 crc kubenswrapper[4693]: I1008 07:53:31.364331 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:53:31 crc kubenswrapper[4693]: E1008 07:53:31.365122 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:53:44 crc kubenswrapper[4693]: I1008 07:53:44.362839 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:53:44 crc kubenswrapper[4693]: E1008 07:53:44.363538 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:53:56 crc kubenswrapper[4693]: I1008 07:53:56.363960 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:53:56 crc kubenswrapper[4693]: E1008 07:53:56.365201 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:54:08 crc kubenswrapper[4693]: I1008 07:54:08.364209 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:54:08 crc kubenswrapper[4693]: E1008 07:54:08.365287 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:54:23 crc kubenswrapper[4693]: I1008 07:54:23.362766 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:54:23 crc kubenswrapper[4693]: E1008 07:54:23.363538 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:54:34 crc kubenswrapper[4693]: I1008 07:54:34.363643 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:54:34 crc kubenswrapper[4693]: E1008 07:54:34.364443 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:54:49 crc kubenswrapper[4693]: I1008 07:54:49.363321 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:54:49 crc kubenswrapper[4693]: E1008 07:54:49.363999 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:55:03 crc kubenswrapper[4693]: I1008 07:55:03.363571 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:55:03 crc kubenswrapper[4693]: E1008 07:55:03.365570 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:55:15 crc kubenswrapper[4693]: I1008 07:55:15.375281 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:55:15 crc kubenswrapper[4693]: E1008 07:55:15.375957 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:55:29 crc kubenswrapper[4693]: I1008 07:55:29.363410 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:55:29 crc kubenswrapper[4693]: E1008 07:55:29.364337 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:55:44 crc kubenswrapper[4693]: I1008 07:55:44.364003 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:55:44 crc kubenswrapper[4693]: E1008 07:55:44.365046 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" 
podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:55:55 crc kubenswrapper[4693]: I1008 07:55:55.375274 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:55:55 crc kubenswrapper[4693]: E1008 07:55:55.376232 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:56:06 crc kubenswrapper[4693]: I1008 07:56:06.839414 4693 generic.go:334] "Generic (PLEG): container finished" podID="014202b7-db23-455e-ba57-d12d3b6e2975" containerID="2e123601c08e82b2bdcc10d6399df91cbdfb900d7d5439e1d0a4b9bc918f07c7" exitCode=0 Oct 08 07:56:06 crc kubenswrapper[4693]: I1008 07:56:06.840359 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" event={"ID":"014202b7-db23-455e-ba57-d12d3b6e2975","Type":"ContainerDied","Data":"2e123601c08e82b2bdcc10d6399df91cbdfb900d7d5439e1d0a4b9bc918f07c7"} Oct 08 07:56:07 crc kubenswrapper[4693]: I1008 07:56:07.367882 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:56:07 crc kubenswrapper[4693]: E1008 07:56:07.368636 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.420439 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.543863 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-ssh-key\") pod \"014202b7-db23-455e-ba57-d12d3b6e2975\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.544041 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-libvirt-secret-0\") pod \"014202b7-db23-455e-ba57-d12d3b6e2975\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.544083 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzwsv\" (UniqueName: \"kubernetes.io/projected/014202b7-db23-455e-ba57-d12d3b6e2975-kube-api-access-lzwsv\") pod \"014202b7-db23-455e-ba57-d12d3b6e2975\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.544170 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-inventory\") pod \"014202b7-db23-455e-ba57-d12d3b6e2975\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.544277 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-libvirt-combined-ca-bundle\") pod \"014202b7-db23-455e-ba57-d12d3b6e2975\" (UID: \"014202b7-db23-455e-ba57-d12d3b6e2975\") " Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.549958 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "014202b7-db23-455e-ba57-d12d3b6e2975" (UID: "014202b7-db23-455e-ba57-d12d3b6e2975"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.553452 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/014202b7-db23-455e-ba57-d12d3b6e2975-kube-api-access-lzwsv" (OuterVolumeSpecName: "kube-api-access-lzwsv") pod "014202b7-db23-455e-ba57-d12d3b6e2975" (UID: "014202b7-db23-455e-ba57-d12d3b6e2975"). InnerVolumeSpecName "kube-api-access-lzwsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.574496 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "014202b7-db23-455e-ba57-d12d3b6e2975" (UID: "014202b7-db23-455e-ba57-d12d3b6e2975"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.577648 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-inventory" (OuterVolumeSpecName: "inventory") pod "014202b7-db23-455e-ba57-d12d3b6e2975" (UID: "014202b7-db23-455e-ba57-d12d3b6e2975"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.588948 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "014202b7-db23-455e-ba57-d12d3b6e2975" (UID: "014202b7-db23-455e-ba57-d12d3b6e2975"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.647151 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.647194 4693 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.647208 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzwsv\" (UniqueName: \"kubernetes.io/projected/014202b7-db23-455e-ba57-d12d3b6e2975-kube-api-access-lzwsv\") on node \"crc\" DevicePath \"\"" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.647222 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.647235 4693 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/014202b7-db23-455e-ba57-d12d3b6e2975-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.862978 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" event={"ID":"014202b7-db23-455e-ba57-d12d3b6e2975","Type":"ContainerDied","Data":"4e27f5fcd78d26d10b0f97ba8b83e0a05cfafe6115c76745f6d7b590953707b5"} Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.863020 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e27f5fcd78d26d10b0f97ba8b83e0a05cfafe6115c76745f6d7b590953707b5" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.863090 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.996348 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd"] Oct 08 07:56:08 crc kubenswrapper[4693]: E1008 07:56:08.997186 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="014202b7-db23-455e-ba57-d12d3b6e2975" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.997228 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="014202b7-db23-455e-ba57-d12d3b6e2975" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 08 07:56:08 crc kubenswrapper[4693]: E1008 07:56:08.997257 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb84274c-0a78-495b-894f-fc49fd31ec4f" containerName="registry-server" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.997269 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb84274c-0a78-495b-894f-fc49fd31ec4f" containerName="registry-server" Oct 08 07:56:08 crc kubenswrapper[4693]: E1008 07:56:08.997287 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb84274c-0a78-495b-894f-fc49fd31ec4f" containerName="extract-utilities" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.997301 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb84274c-0a78-495b-894f-fc49fd31ec4f" containerName="extract-utilities" Oct 08 07:56:08 crc kubenswrapper[4693]: E1008 07:56:08.997321 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb84274c-0a78-495b-894f-fc49fd31ec4f" containerName="extract-content" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.997334 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb84274c-0a78-495b-894f-fc49fd31ec4f" containerName="extract-content" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.997686 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb84274c-0a78-495b-894f-fc49fd31ec4f" containerName="registry-server" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.997725 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="014202b7-db23-455e-ba57-d12d3b6e2975" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 08 07:56:08 crc kubenswrapper[4693]: I1008 07:56:08.999580 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.003082 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.003235 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.003353 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.003684 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.003945 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.004203 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.004998 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.006912 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd"] Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.054981 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.055084 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.055184 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.055216 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.055277 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: 
\"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.055326 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.055378 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgkkn\" (UniqueName: \"kubernetes.io/projected/14d12cdc-edb4-47c1-b245-b95cb21067bd-kube-api-access-wgkkn\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.055445 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.055482 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.157713 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.157791 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.157902 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgkkn\" (UniqueName: \"kubernetes.io/projected/14d12cdc-edb4-47c1-b245-b95cb21067bd-kube-api-access-wgkkn\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.157956 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.158001 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.158044 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.158151 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.158285 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.158328 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.159180 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.162484 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.162843 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: 
\"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.162961 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.163486 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.166294 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.166730 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.167096 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.185054 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgkkn\" (UniqueName: \"kubernetes.io/projected/14d12cdc-edb4-47c1-b245-b95cb21067bd-kube-api-access-wgkkn\") pod \"nova-edpm-deployment-openstack-edpm-ipam-n87pd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.319024 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.696377 4693 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.699139 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd"] Oct 08 07:56:09 crc kubenswrapper[4693]: I1008 07:56:09.873851 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" event={"ID":"14d12cdc-edb4-47c1-b245-b95cb21067bd","Type":"ContainerStarted","Data":"5b817de2fa917c07181a6df8813e6e15f5267e3543da3aad51e8f2b2a8eaa5a1"} Oct 08 07:56:10 crc kubenswrapper[4693]: I1008 07:56:10.891920 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" event={"ID":"14d12cdc-edb4-47c1-b245-b95cb21067bd","Type":"ContainerStarted","Data":"b4c7ef2e97f45047a8783b591f6855d8a71e61d7e544049020900b41be989e65"} Oct 08 07:56:10 crc kubenswrapper[4693]: I1008 07:56:10.929479 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" podStartSLOduration=2.402026807 podStartE2EDuration="2.929458939s" podCreationTimestamp="2025-10-08 07:56:08 +0000 UTC" firstStartedPulling="2025-10-08 07:56:09.696045838 +0000 UTC m=+2355.067010773" lastFinishedPulling="2025-10-08 07:56:10.22347796 +0000 UTC m=+2355.594442905" observedRunningTime="2025-10-08 07:56:10.918649525 +0000 UTC m=+2356.289614470" watchObservedRunningTime="2025-10-08 07:56:10.929458939 +0000 UTC m=+2356.300423884" Oct 08 07:56:19 crc kubenswrapper[4693]: I1008 07:56:19.364135 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:56:19 crc kubenswrapper[4693]: E1008 07:56:19.365310 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:56:34 crc kubenswrapper[4693]: I1008 07:56:34.363727 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:56:34 crc kubenswrapper[4693]: E1008 07:56:34.364676 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:56:47 crc kubenswrapper[4693]: I1008 07:56:47.362930 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:56:47 crc kubenswrapper[4693]: E1008 07:56:47.363720 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:56:58 crc kubenswrapper[4693]: I1008 07:56:58.362460 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:56:58 crc kubenswrapper[4693]: E1008 07:56:58.363355 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:57:13 crc kubenswrapper[4693]: I1008 07:57:13.363947 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:57:13 crc kubenswrapper[4693]: E1008 07:57:13.366579 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:57:29 crc kubenswrapper[4693]: I1008 07:57:29.363180 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:57:29 crc kubenswrapper[4693]: E1008 07:57:29.364184 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:57:40 crc kubenswrapper[4693]: I1008 07:57:40.364072 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:57:40 crc kubenswrapper[4693]: E1008 07:57:40.365141 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 07:57:54 crc kubenswrapper[4693]: I1008 07:57:54.363617 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 07:57:55 crc kubenswrapper[4693]: I1008 07:57:55.104435 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"7f7ddc6a4e37f0f16608812de5d34fc6d614c42d995eb4b20e05072ddcc7127c"} Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.157987 4693 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr"] Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.159664 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr" Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.161771 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.164994 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.174045 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr"] Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.199337 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdmsj\" (UniqueName: \"kubernetes.io/projected/444c3ec4-f711-4b79-94f9-ec5d22a9800c-kube-api-access-bdmsj\") pod \"collect-profiles-29331840-jcrfr\" (UID: \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr" Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.199385 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/444c3ec4-f711-4b79-94f9-ec5d22a9800c-secret-volume\") pod \"collect-profiles-29331840-jcrfr\" (UID: \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr" Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.199751 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/444c3ec4-f711-4b79-94f9-ec5d22a9800c-config-volume\") pod \"collect-profiles-29331840-jcrfr\" (UID: \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr" Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.302006 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdmsj\" (UniqueName: \"kubernetes.io/projected/444c3ec4-f711-4b79-94f9-ec5d22a9800c-kube-api-access-bdmsj\") pod \"collect-profiles-29331840-jcrfr\" (UID: \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr" Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.302063 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/444c3ec4-f711-4b79-94f9-ec5d22a9800c-secret-volume\") pod \"collect-profiles-29331840-jcrfr\" (UID: \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr" Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.302134 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/444c3ec4-f711-4b79-94f9-ec5d22a9800c-config-volume\") pod \"collect-profiles-29331840-jcrfr\" (UID: \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr" Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.303306 
Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.303306 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/444c3ec4-f711-4b79-94f9-ec5d22a9800c-config-volume\") pod \"collect-profiles-29331840-jcrfr\" (UID: \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr"
Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.310694 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/444c3ec4-f711-4b79-94f9-ec5d22a9800c-secret-volume\") pod \"collect-profiles-29331840-jcrfr\" (UID: \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr"
Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.320794 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdmsj\" (UniqueName: \"kubernetes.io/projected/444c3ec4-f711-4b79-94f9-ec5d22a9800c-kube-api-access-bdmsj\") pod \"collect-profiles-29331840-jcrfr\" (UID: \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr"
Oct 08 08:00:00 crc kubenswrapper[4693]: I1008 08:00:00.499931 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr"
Oct 08 08:00:01 crc kubenswrapper[4693]: I1008 08:00:01.018134 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr"]
Oct 08 08:00:01 crc kubenswrapper[4693]: I1008 08:00:01.613339 4693 generic.go:334] "Generic (PLEG): container finished" podID="444c3ec4-f711-4b79-94f9-ec5d22a9800c" containerID="56c2f5d15fe06980e933f499feeb7fa4dba50f4486145ff76012ad7d58c10378" exitCode=0
Oct 08 08:00:01 crc kubenswrapper[4693]: I1008 08:00:01.613406 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr" event={"ID":"444c3ec4-f711-4b79-94f9-ec5d22a9800c","Type":"ContainerDied","Data":"56c2f5d15fe06980e933f499feeb7fa4dba50f4486145ff76012ad7d58c10378"}
Oct 08 08:00:01 crc kubenswrapper[4693]: I1008 08:00:01.613471 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr" event={"ID":"444c3ec4-f711-4b79-94f9-ec5d22a9800c","Type":"ContainerStarted","Data":"729b93209dab657e0b3287a524196305185d5a5a8f5852720a0698292d9e7242"}
Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.121747 4693 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr" Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.170650 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/444c3ec4-f711-4b79-94f9-ec5d22a9800c-config-volume\") pod \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\" (UID: \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\") " Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.170759 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/444c3ec4-f711-4b79-94f9-ec5d22a9800c-secret-volume\") pod \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\" (UID: \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\") " Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.170828 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdmsj\" (UniqueName: \"kubernetes.io/projected/444c3ec4-f711-4b79-94f9-ec5d22a9800c-kube-api-access-bdmsj\") pod \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\" (UID: \"444c3ec4-f711-4b79-94f9-ec5d22a9800c\") " Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.171385 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/444c3ec4-f711-4b79-94f9-ec5d22a9800c-config-volume" (OuterVolumeSpecName: "config-volume") pod "444c3ec4-f711-4b79-94f9-ec5d22a9800c" (UID: "444c3ec4-f711-4b79-94f9-ec5d22a9800c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.176552 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/444c3ec4-f711-4b79-94f9-ec5d22a9800c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "444c3ec4-f711-4b79-94f9-ec5d22a9800c" (UID: "444c3ec4-f711-4b79-94f9-ec5d22a9800c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.179668 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/444c3ec4-f711-4b79-94f9-ec5d22a9800c-kube-api-access-bdmsj" (OuterVolumeSpecName: "kube-api-access-bdmsj") pod "444c3ec4-f711-4b79-94f9-ec5d22a9800c" (UID: "444c3ec4-f711-4b79-94f9-ec5d22a9800c"). InnerVolumeSpecName "kube-api-access-bdmsj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.273141 4693 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/444c3ec4-f711-4b79-94f9-ec5d22a9800c-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.273481 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdmsj\" (UniqueName: \"kubernetes.io/projected/444c3ec4-f711-4b79-94f9-ec5d22a9800c-kube-api-access-bdmsj\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.273495 4693 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/444c3ec4-f711-4b79-94f9-ec5d22a9800c-config-volume\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.642677 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr" event={"ID":"444c3ec4-f711-4b79-94f9-ec5d22a9800c","Type":"ContainerDied","Data":"729b93209dab657e0b3287a524196305185d5a5a8f5852720a0698292d9e7242"} Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.642756 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="729b93209dab657e0b3287a524196305185d5a5a8f5852720a0698292d9e7242" Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.642692 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331840-jcrfr" Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.644916 4693 generic.go:334] "Generic (PLEG): container finished" podID="14d12cdc-edb4-47c1-b245-b95cb21067bd" containerID="b4c7ef2e97f45047a8783b591f6855d8a71e61d7e544049020900b41be989e65" exitCode=0 Oct 08 08:00:03 crc kubenswrapper[4693]: I1008 08:00:03.645138 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" event={"ID":"14d12cdc-edb4-47c1-b245-b95cb21067bd","Type":"ContainerDied","Data":"b4c7ef2e97f45047a8783b591f6855d8a71e61d7e544049020900b41be989e65"} Oct 08 08:00:04 crc kubenswrapper[4693]: I1008 08:00:04.209807 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4"] Oct 08 08:00:04 crc kubenswrapper[4693]: I1008 08:00:04.219621 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331795-kwbx4"] Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.076556 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.234411 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-combined-ca-bundle\") pod \"14d12cdc-edb4-47c1-b245-b95cb21067bd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.234492 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-cell1-compute-config-0\") pod \"14d12cdc-edb4-47c1-b245-b95cb21067bd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.234518 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-inventory\") pod \"14d12cdc-edb4-47c1-b245-b95cb21067bd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.234565 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-migration-ssh-key-1\") pod \"14d12cdc-edb4-47c1-b245-b95cb21067bd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.234640 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-extra-config-0\") pod \"14d12cdc-edb4-47c1-b245-b95cb21067bd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.234660 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-ssh-key\") pod \"14d12cdc-edb4-47c1-b245-b95cb21067bd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.234697 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-cell1-compute-config-1\") pod \"14d12cdc-edb4-47c1-b245-b95cb21067bd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.234717 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgkkn\" (UniqueName: \"kubernetes.io/projected/14d12cdc-edb4-47c1-b245-b95cb21067bd-kube-api-access-wgkkn\") pod \"14d12cdc-edb4-47c1-b245-b95cb21067bd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.234771 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-migration-ssh-key-0\") pod \"14d12cdc-edb4-47c1-b245-b95cb21067bd\" (UID: \"14d12cdc-edb4-47c1-b245-b95cb21067bd\") " Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.269893 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "14d12cdc-edb4-47c1-b245-b95cb21067bd" (UID: "14d12cdc-edb4-47c1-b245-b95cb21067bd"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.272358 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14d12cdc-edb4-47c1-b245-b95cb21067bd-kube-api-access-wgkkn" (OuterVolumeSpecName: "kube-api-access-wgkkn") pod "14d12cdc-edb4-47c1-b245-b95cb21067bd" (UID: "14d12cdc-edb4-47c1-b245-b95cb21067bd"). InnerVolumeSpecName "kube-api-access-wgkkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.280927 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "14d12cdc-edb4-47c1-b245-b95cb21067bd" (UID: "14d12cdc-edb4-47c1-b245-b95cb21067bd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.284087 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-inventory" (OuterVolumeSpecName: "inventory") pod "14d12cdc-edb4-47c1-b245-b95cb21067bd" (UID: "14d12cdc-edb4-47c1-b245-b95cb21067bd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.297502 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "14d12cdc-edb4-47c1-b245-b95cb21067bd" (UID: "14d12cdc-edb4-47c1-b245-b95cb21067bd"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.302587 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "14d12cdc-edb4-47c1-b245-b95cb21067bd" (UID: "14d12cdc-edb4-47c1-b245-b95cb21067bd"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.303954 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "14d12cdc-edb4-47c1-b245-b95cb21067bd" (UID: "14d12cdc-edb4-47c1-b245-b95cb21067bd"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.311437 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "14d12cdc-edb4-47c1-b245-b95cb21067bd" (UID: "14d12cdc-edb4-47c1-b245-b95cb21067bd"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.314717 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "14d12cdc-edb4-47c1-b245-b95cb21067bd" (UID: "14d12cdc-edb4-47c1-b245-b95cb21067bd"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.342172 4693 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.342209 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.342220 4693 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.342231 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgkkn\" (UniqueName: \"kubernetes.io/projected/14d12cdc-edb4-47c1-b245-b95cb21067bd-kube-api-access-wgkkn\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.342239 4693 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.342248 4693 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.342257 4693 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.342265 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.342273 4693 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/14d12cdc-edb4-47c1-b245-b95cb21067bd-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.375996 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="045e2499-3e6f-4ea0-8036-ba25d897c4da" path="/var/lib/kubelet/pods/045e2499-3e6f-4ea0-8036-ba25d897c4da/volumes" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.676382 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" 
event={"ID":"14d12cdc-edb4-47c1-b245-b95cb21067bd","Type":"ContainerDied","Data":"5b817de2fa917c07181a6df8813e6e15f5267e3543da3aad51e8f2b2a8eaa5a1"} Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.676428 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b817de2fa917c07181a6df8813e6e15f5267e3543da3aad51e8f2b2a8eaa5a1" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.676437 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-n87pd" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.779044 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh"] Oct 08 08:00:05 crc kubenswrapper[4693]: E1008 08:00:05.780056 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="444c3ec4-f711-4b79-94f9-ec5d22a9800c" containerName="collect-profiles" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.780259 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="444c3ec4-f711-4b79-94f9-ec5d22a9800c" containerName="collect-profiles" Oct 08 08:00:05 crc kubenswrapper[4693]: E1008 08:00:05.780408 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14d12cdc-edb4-47c1-b245-b95cb21067bd" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.780535 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="14d12cdc-edb4-47c1-b245-b95cb21067bd" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.781138 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="444c3ec4-f711-4b79-94f9-ec5d22a9800c" containerName="collect-profiles" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.781309 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="14d12cdc-edb4-47c1-b245-b95cb21067bd" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.782589 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.787761 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.788354 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.788952 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.789473 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zxm2d" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.789894 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.796051 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh"] Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.956040 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.956623 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.956861 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.957056 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.957291 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:05 crc kubenswrapper[4693]: 
I1008 08:00:05.957517 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:05 crc kubenswrapper[4693]: I1008 08:00:05.957701 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bgb8\" (UniqueName: \"kubernetes.io/projected/be7009a4-69bd-41cc-8fe8-02e5d79db395-kube-api-access-8bgb8\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.060174 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.060285 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.060320 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.061218 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.061274 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.061309 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bgb8\" (UniqueName: \"kubernetes.io/projected/be7009a4-69bd-41cc-8fe8-02e5d79db395-kube-api-access-8bgb8\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.061396 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.064545 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.065276 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.066213 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.066763 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.067031 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.069095 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.086195 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bgb8\" (UniqueName: \"kubernetes.io/projected/be7009a4-69bd-41cc-8fe8-02e5d79db395-kube-api-access-8bgb8\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh\" (UID: 
\"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.118898 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:00:06 crc kubenswrapper[4693]: I1008 08:00:06.777609 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh"] Oct 08 08:00:07 crc kubenswrapper[4693]: I1008 08:00:07.695244 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" event={"ID":"be7009a4-69bd-41cc-8fe8-02e5d79db395","Type":"ContainerStarted","Data":"0d1e4c44cb35d8b8405c359ef8dc57015bc2f7e35e1f02c43052befba7025d46"} Oct 08 08:00:08 crc kubenswrapper[4693]: I1008 08:00:08.710366 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" event={"ID":"be7009a4-69bd-41cc-8fe8-02e5d79db395","Type":"ContainerStarted","Data":"7e12ab150618e8ee37c6e824e9bc0e6e022a9b56af47fa39491fcdc50af7a146"} Oct 08 08:00:08 crc kubenswrapper[4693]: I1008 08:00:08.750115 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" podStartSLOduration=3.126176916 podStartE2EDuration="3.750096513s" podCreationTimestamp="2025-10-08 08:00:05 +0000 UTC" firstStartedPulling="2025-10-08 08:00:06.823527766 +0000 UTC m=+2592.194492721" lastFinishedPulling="2025-10-08 08:00:07.447447343 +0000 UTC m=+2592.818412318" observedRunningTime="2025-10-08 08:00:08.73316526 +0000 UTC m=+2594.104130275" watchObservedRunningTime="2025-10-08 08:00:08.750096513 +0000 UTC m=+2594.121061448" Oct 08 08:00:19 crc kubenswrapper[4693]: I1008 08:00:19.288956 4693 scope.go:117] "RemoveContainer" containerID="128cd73c137022c3f73a692754f06e5433217f4c599de5d00e541e729da6fee1" Oct 08 08:00:23 crc kubenswrapper[4693]: I1008 08:00:23.491032 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:00:23 crc kubenswrapper[4693]: I1008 08:00:23.491941 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.631552 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g9wzw"] Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.634851 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9wzw" Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.649731 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9wzw"] Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.707616 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa88900a-36e0-4582-85b5-0123d166c951-catalog-content\") pod \"redhat-marketplace-g9wzw\" (UID: \"aa88900a-36e0-4582-85b5-0123d166c951\") " pod="openshift-marketplace/redhat-marketplace-g9wzw" Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.707752 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hg5l\" (UniqueName: \"kubernetes.io/projected/aa88900a-36e0-4582-85b5-0123d166c951-kube-api-access-7hg5l\") pod \"redhat-marketplace-g9wzw\" (UID: \"aa88900a-36e0-4582-85b5-0123d166c951\") " pod="openshift-marketplace/redhat-marketplace-g9wzw" Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.708000 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa88900a-36e0-4582-85b5-0123d166c951-utilities\") pod \"redhat-marketplace-g9wzw\" (UID: \"aa88900a-36e0-4582-85b5-0123d166c951\") " pod="openshift-marketplace/redhat-marketplace-g9wzw" Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.810153 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa88900a-36e0-4582-85b5-0123d166c951-utilities\") pod \"redhat-marketplace-g9wzw\" (UID: \"aa88900a-36e0-4582-85b5-0123d166c951\") " pod="openshift-marketplace/redhat-marketplace-g9wzw" Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.810332 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa88900a-36e0-4582-85b5-0123d166c951-catalog-content\") pod \"redhat-marketplace-g9wzw\" (UID: \"aa88900a-36e0-4582-85b5-0123d166c951\") " pod="openshift-marketplace/redhat-marketplace-g9wzw" Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.810366 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hg5l\" (UniqueName: \"kubernetes.io/projected/aa88900a-36e0-4582-85b5-0123d166c951-kube-api-access-7hg5l\") pod \"redhat-marketplace-g9wzw\" (UID: \"aa88900a-36e0-4582-85b5-0123d166c951\") " pod="openshift-marketplace/redhat-marketplace-g9wzw" Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.810716 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa88900a-36e0-4582-85b5-0123d166c951-utilities\") pod \"redhat-marketplace-g9wzw\" (UID: \"aa88900a-36e0-4582-85b5-0123d166c951\") " pod="openshift-marketplace/redhat-marketplace-g9wzw" Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.810855 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa88900a-36e0-4582-85b5-0123d166c951-catalog-content\") pod \"redhat-marketplace-g9wzw\" (UID: \"aa88900a-36e0-4582-85b5-0123d166c951\") " pod="openshift-marketplace/redhat-marketplace-g9wzw" Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.834545 4693 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-7hg5l\" (UniqueName: \"kubernetes.io/projected/aa88900a-36e0-4582-85b5-0123d166c951-kube-api-access-7hg5l\") pod \"redhat-marketplace-g9wzw\" (UID: \"aa88900a-36e0-4582-85b5-0123d166c951\") " pod="openshift-marketplace/redhat-marketplace-g9wzw" Oct 08 08:00:45 crc kubenswrapper[4693]: I1008 08:00:45.962419 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9wzw" Oct 08 08:00:46 crc kubenswrapper[4693]: I1008 08:00:46.431427 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9wzw"] Oct 08 08:00:47 crc kubenswrapper[4693]: I1008 08:00:47.174895 4693 generic.go:334] "Generic (PLEG): container finished" podID="aa88900a-36e0-4582-85b5-0123d166c951" containerID="d887ceb1a604073749ca5e09b334e741572c0c08e63cb4f4b5ffbedec0f029ca" exitCode=0 Oct 08 08:00:47 crc kubenswrapper[4693]: I1008 08:00:47.175025 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9wzw" event={"ID":"aa88900a-36e0-4582-85b5-0123d166c951","Type":"ContainerDied","Data":"d887ceb1a604073749ca5e09b334e741572c0c08e63cb4f4b5ffbedec0f029ca"} Oct 08 08:00:47 crc kubenswrapper[4693]: I1008 08:00:47.175321 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9wzw" event={"ID":"aa88900a-36e0-4582-85b5-0123d166c951","Type":"ContainerStarted","Data":"001d400e21c25bae122b48047337c8c5510551b23428884ee66b6c591a2d7334"} Oct 08 08:00:48 crc kubenswrapper[4693]: I1008 08:00:48.186108 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9wzw" event={"ID":"aa88900a-36e0-4582-85b5-0123d166c951","Type":"ContainerStarted","Data":"02c5d9a6d8e703d2ce3c9c333fb30deb365c33aeeee2dfccbd8814c4350fa0f3"} Oct 08 08:00:49 crc kubenswrapper[4693]: I1008 08:00:49.201450 4693 generic.go:334] "Generic (PLEG): container finished" podID="aa88900a-36e0-4582-85b5-0123d166c951" containerID="02c5d9a6d8e703d2ce3c9c333fb30deb365c33aeeee2dfccbd8814c4350fa0f3" exitCode=0 Oct 08 08:00:49 crc kubenswrapper[4693]: I1008 08:00:49.201580 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9wzw" event={"ID":"aa88900a-36e0-4582-85b5-0123d166c951","Type":"ContainerDied","Data":"02c5d9a6d8e703d2ce3c9c333fb30deb365c33aeeee2dfccbd8814c4350fa0f3"} Oct 08 08:00:51 crc kubenswrapper[4693]: I1008 08:00:51.231606 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9wzw" event={"ID":"aa88900a-36e0-4582-85b5-0123d166c951","Type":"ContainerStarted","Data":"19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed"} Oct 08 08:00:51 crc kubenswrapper[4693]: I1008 08:00:51.253436 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g9wzw" podStartSLOduration=2.649248107 podStartE2EDuration="6.253414808s" podCreationTimestamp="2025-10-08 08:00:45 +0000 UTC" firstStartedPulling="2025-10-08 08:00:47.179706545 +0000 UTC m=+2632.550671490" lastFinishedPulling="2025-10-08 08:00:50.783873256 +0000 UTC m=+2636.154838191" observedRunningTime="2025-10-08 08:00:51.251279162 +0000 UTC m=+2636.622244097" watchObservedRunningTime="2025-10-08 08:00:51.253414808 +0000 UTC m=+2636.624379743" Oct 08 08:00:53 crc kubenswrapper[4693]: I1008 08:00:53.490078 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr 
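The pod_startup_latency_tracker entry above reports several durations, and the logged values are consistent with podStartSLOduration being podStartE2EDuration minus the time spent pulling images (the m=+... suffixes are monotonic clock readings). A small Go check of that arithmetic for redhat-marketplace-g9wzw, using only numbers taken from the entry above:

package main

import "fmt"

// Reproducing the pod_startup_latency_tracker arithmetic for
// redhat-marketplace-g9wzw from the values logged above:
// podStartSLOduration = podStartE2EDuration - time spent pulling images.
func main() {
	const e2e = 6.253414808 // watchObservedRunningTime - podCreationTimestamp, seconds
	// monotonic clock readings (the m=+... suffixes in the log entry):
	const firstStartedPulling = 2632.550671490
	const lastFinishedPulling = 2636.154838191

	slo := e2e - (lastFinishedPulling - firstStartedPulling)
	fmt.Printf("podStartSLOduration=%.9fs\n", slo) // 2.649248107s, matching the log
}

The same relation holds for the telemetry pod logged at 08:00:08 (3.750096513s end-to-end minus 0.623919597s of pulling gives the reported 3.126176916), which supports reading the SLO duration as startup time excluding image pulls.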
Oct 08 08:00:53 crc kubenswrapper[4693]: I1008 08:00:53.490078 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 08:00:53 crc kubenswrapper[4693]: I1008 08:00:53.490553 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 08:00:55 crc kubenswrapper[4693]: I1008 08:00:55.963173 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g9wzw"
Oct 08 08:00:55 crc kubenswrapper[4693]: I1008 08:00:55.963952 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g9wzw"
Oct 08 08:00:56 crc kubenswrapper[4693]: I1008 08:00:56.054253 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g9wzw"
Oct 08 08:00:56 crc kubenswrapper[4693]: I1008 08:00:56.375204 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g9wzw"
Oct 08 08:00:56 crc kubenswrapper[4693]: I1008 08:00:56.441956 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9wzw"]
Oct 08 08:00:58 crc kubenswrapper[4693]: I1008 08:00:58.321364 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g9wzw" podUID="aa88900a-36e0-4582-85b5-0123d166c951" containerName="registry-server" containerID="cri-o://19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed" gracePeriod=2
Oct 08 08:00:58 crc kubenswrapper[4693]: I1008 08:00:58.851649 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9wzw"
Oct 08 08:00:58 crc kubenswrapper[4693]: I1008 08:00:58.916910 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa88900a-36e0-4582-85b5-0123d166c951-utilities\") pod \"aa88900a-36e0-4582-85b5-0123d166c951\" (UID: \"aa88900a-36e0-4582-85b5-0123d166c951\") "
Oct 08 08:00:58 crc kubenswrapper[4693]: I1008 08:00:58.917303 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hg5l\" (UniqueName: \"kubernetes.io/projected/aa88900a-36e0-4582-85b5-0123d166c951-kube-api-access-7hg5l\") pod \"aa88900a-36e0-4582-85b5-0123d166c951\" (UID: \"aa88900a-36e0-4582-85b5-0123d166c951\") "
Oct 08 08:00:58 crc kubenswrapper[4693]: I1008 08:00:58.917482 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa88900a-36e0-4582-85b5-0123d166c951-catalog-content\") pod \"aa88900a-36e0-4582-85b5-0123d166c951\" (UID: \"aa88900a-36e0-4582-85b5-0123d166c951\") "
Oct 08 08:00:58 crc kubenswrapper[4693]: I1008 08:00:58.917781 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa88900a-36e0-4582-85b5-0123d166c951-utilities" (OuterVolumeSpecName: "utilities") pod "aa88900a-36e0-4582-85b5-0123d166c951" (UID: "aa88900a-36e0-4582-85b5-0123d166c951"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
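The patch_prober/prober entries above (08:00:53) show the liveness probe for machine-config-daemon: an HTTP GET against http://127.0.0.1:8798/health that fails with connection refused while the container is down. Conceptually the check is a timed GET where any connection error or bad status counts as a failure; a simplified sketch of that check in Go, where probeOnce is an illustrative name rather than the kubelet's prober API:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs a single HTTP liveness check the way the kubelet's
// prober does conceptually: a connection error or a non-success status
// counts as a failure. Simplified sketch, not kubelet source.
func probeOnce(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "connect: connection refused", as in the log
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("probe failed with status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probeOnce("http://127.0.0.1:8798/health", time.Second); err != nil {
		fmt.Println("Probe failed:", err)
	}
}

A single failure like the one logged here does not restart the container by itself; the kubelet acts only after the probe's configured failure threshold is exceeded.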
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:00:58 crc kubenswrapper[4693]: I1008 08:00:58.918271 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa88900a-36e0-4582-85b5-0123d166c951-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:58 crc kubenswrapper[4693]: I1008 08:00:58.934580 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa88900a-36e0-4582-85b5-0123d166c951-kube-api-access-7hg5l" (OuterVolumeSpecName: "kube-api-access-7hg5l") pod "aa88900a-36e0-4582-85b5-0123d166c951" (UID: "aa88900a-36e0-4582-85b5-0123d166c951"). InnerVolumeSpecName "kube-api-access-7hg5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:00:58 crc kubenswrapper[4693]: I1008 08:00:58.937035 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa88900a-36e0-4582-85b5-0123d166c951-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aa88900a-36e0-4582-85b5-0123d166c951" (UID: "aa88900a-36e0-4582-85b5-0123d166c951"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.020021 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hg5l\" (UniqueName: \"kubernetes.io/projected/aa88900a-36e0-4582-85b5-0123d166c951-kube-api-access-7hg5l\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.020093 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa88900a-36e0-4582-85b5-0123d166c951-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.334265 4693 generic.go:334] "Generic (PLEG): container finished" podID="aa88900a-36e0-4582-85b5-0123d166c951" containerID="19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed" exitCode=0 Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.334328 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9wzw" event={"ID":"aa88900a-36e0-4582-85b5-0123d166c951","Type":"ContainerDied","Data":"19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed"} Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.334363 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9wzw" event={"ID":"aa88900a-36e0-4582-85b5-0123d166c951","Type":"ContainerDied","Data":"001d400e21c25bae122b48047337c8c5510551b23428884ee66b6c591a2d7334"} Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.334385 4693 scope.go:117] "RemoveContainer" containerID="19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed" Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.334458 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9wzw" Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.370178 4693 scope.go:117] "RemoveContainer" containerID="02c5d9a6d8e703d2ce3c9c333fb30deb365c33aeeee2dfccbd8814c4350fa0f3" Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.384943 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9wzw"] Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.389878 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9wzw"] Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.394798 4693 scope.go:117] "RemoveContainer" containerID="d887ceb1a604073749ca5e09b334e741572c0c08e63cb4f4b5ffbedec0f029ca" Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.446628 4693 scope.go:117] "RemoveContainer" containerID="19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed" Oct 08 08:00:59 crc kubenswrapper[4693]: E1008 08:00:59.447469 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed\": container with ID starting with 19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed not found: ID does not exist" containerID="19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed" Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.447533 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed"} err="failed to get container status \"19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed\": rpc error: code = NotFound desc = could not find container \"19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed\": container with ID starting with 19acd1f268f6ac527a3277e92afb0dc79a772021b95256b747a691196070b1ed not found: ID does not exist" Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.447576 4693 scope.go:117] "RemoveContainer" containerID="02c5d9a6d8e703d2ce3c9c333fb30deb365c33aeeee2dfccbd8814c4350fa0f3" Oct 08 08:00:59 crc kubenswrapper[4693]: E1008 08:00:59.448148 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02c5d9a6d8e703d2ce3c9c333fb30deb365c33aeeee2dfccbd8814c4350fa0f3\": container with ID starting with 02c5d9a6d8e703d2ce3c9c333fb30deb365c33aeeee2dfccbd8814c4350fa0f3 not found: ID does not exist" containerID="02c5d9a6d8e703d2ce3c9c333fb30deb365c33aeeee2dfccbd8814c4350fa0f3" Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.448194 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02c5d9a6d8e703d2ce3c9c333fb30deb365c33aeeee2dfccbd8814c4350fa0f3"} err="failed to get container status \"02c5d9a6d8e703d2ce3c9c333fb30deb365c33aeeee2dfccbd8814c4350fa0f3\": rpc error: code = NotFound desc = could not find container \"02c5d9a6d8e703d2ce3c9c333fb30deb365c33aeeee2dfccbd8814c4350fa0f3\": container with ID starting with 02c5d9a6d8e703d2ce3c9c333fb30deb365c33aeeee2dfccbd8814c4350fa0f3 not found: ID does not exist" Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.448224 4693 scope.go:117] "RemoveContainer" containerID="d887ceb1a604073749ca5e09b334e741572c0c08e63cb4f4b5ffbedec0f029ca" Oct 08 08:00:59 crc kubenswrapper[4693]: E1008 08:00:59.448531 4693 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d887ceb1a604073749ca5e09b334e741572c0c08e63cb4f4b5ffbedec0f029ca\": container with ID starting with d887ceb1a604073749ca5e09b334e741572c0c08e63cb4f4b5ffbedec0f029ca not found: ID does not exist" containerID="d887ceb1a604073749ca5e09b334e741572c0c08e63cb4f4b5ffbedec0f029ca" Oct 08 08:00:59 crc kubenswrapper[4693]: I1008 08:00:59.448573 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d887ceb1a604073749ca5e09b334e741572c0c08e63cb4f4b5ffbedec0f029ca"} err="failed to get container status \"d887ceb1a604073749ca5e09b334e741572c0c08e63cb4f4b5ffbedec0f029ca\": rpc error: code = NotFound desc = could not find container \"d887ceb1a604073749ca5e09b334e741572c0c08e63cb4f4b5ffbedec0f029ca\": container with ID starting with d887ceb1a604073749ca5e09b334e741572c0c08e63cb4f4b5ffbedec0f029ca not found: ID does not exist" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.213673 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29331841-z6dhr"] Oct 08 08:01:00 crc kubenswrapper[4693]: E1008 08:01:00.214461 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa88900a-36e0-4582-85b5-0123d166c951" containerName="extract-utilities" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.214493 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa88900a-36e0-4582-85b5-0123d166c951" containerName="extract-utilities" Oct 08 08:01:00 crc kubenswrapper[4693]: E1008 08:01:00.214533 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa88900a-36e0-4582-85b5-0123d166c951" containerName="registry-server" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.214544 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa88900a-36e0-4582-85b5-0123d166c951" containerName="registry-server" Oct 08 08:01:00 crc kubenswrapper[4693]: E1008 08:01:00.214560 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa88900a-36e0-4582-85b5-0123d166c951" containerName="extract-content" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.214569 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa88900a-36e0-4582-85b5-0123d166c951" containerName="extract-content" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.214809 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa88900a-36e0-4582-85b5-0123d166c951" containerName="registry-server" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.215652 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.228799 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29331841-z6dhr"] Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.364948 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-combined-ca-bundle\") pod \"keystone-cron-29331841-z6dhr\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.365246 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-config-data\") pod \"keystone-cron-29331841-z6dhr\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.365350 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-fernet-keys\") pod \"keystone-cron-29331841-z6dhr\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.365377 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dpxt\" (UniqueName: \"kubernetes.io/projected/0e12eea6-ac5f-47c8-810b-b304ee039431-kube-api-access-5dpxt\") pod \"keystone-cron-29331841-z6dhr\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.467920 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-combined-ca-bundle\") pod \"keystone-cron-29331841-z6dhr\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.468017 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-config-data\") pod \"keystone-cron-29331841-z6dhr\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.468359 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-fernet-keys\") pod \"keystone-cron-29331841-z6dhr\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.468433 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dpxt\" (UniqueName: \"kubernetes.io/projected/0e12eea6-ac5f-47c8-810b-b304ee039431-kube-api-access-5dpxt\") pod \"keystone-cron-29331841-z6dhr\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.477322 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-config-data\") pod \"keystone-cron-29331841-z6dhr\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.477450 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-fernet-keys\") pod \"keystone-cron-29331841-z6dhr\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.477496 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-combined-ca-bundle\") pod \"keystone-cron-29331841-z6dhr\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.489982 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dpxt\" (UniqueName: \"kubernetes.io/projected/0e12eea6-ac5f-47c8-810b-b304ee039431-kube-api-access-5dpxt\") pod \"keystone-cron-29331841-z6dhr\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:00 crc kubenswrapper[4693]: I1008 08:01:00.531045 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:01 crc kubenswrapper[4693]: I1008 08:01:01.014236 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29331841-z6dhr"] Oct 08 08:01:01 crc kubenswrapper[4693]: I1008 08:01:01.358069 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29331841-z6dhr" event={"ID":"0e12eea6-ac5f-47c8-810b-b304ee039431","Type":"ContainerStarted","Data":"58c7975f8fe3f6b7ce625fa2c93a074eaddf84999541aea39b0e5227acc427a2"} Oct 08 08:01:01 crc kubenswrapper[4693]: I1008 08:01:01.358520 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29331841-z6dhr" event={"ID":"0e12eea6-ac5f-47c8-810b-b304ee039431","Type":"ContainerStarted","Data":"b04601fa3270be482e9b1e5f006d048753cd69ddda6832f513868315f56caf5b"} Oct 08 08:01:01 crc kubenswrapper[4693]: I1008 08:01:01.385848 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29331841-z6dhr" podStartSLOduration=1.385805384 podStartE2EDuration="1.385805384s" podCreationTimestamp="2025-10-08 08:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 08:01:01.384612982 +0000 UTC m=+2646.755577967" watchObservedRunningTime="2025-10-08 08:01:01.385805384 +0000 UTC m=+2646.756770329" Oct 08 08:01:01 crc kubenswrapper[4693]: I1008 08:01:01.386538 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa88900a-36e0-4582-85b5-0123d166c951" path="/var/lib/kubelet/pods/aa88900a-36e0-4582-85b5-0123d166c951/volumes" Oct 08 08:01:03 crc kubenswrapper[4693]: I1008 08:01:03.387475 4693 generic.go:334] "Generic (PLEG): container finished" podID="0e12eea6-ac5f-47c8-810b-b304ee039431" containerID="58c7975f8fe3f6b7ce625fa2c93a074eaddf84999541aea39b0e5227acc427a2" exitCode=0 Oct 08 08:01:03 crc kubenswrapper[4693]: I1008 08:01:03.387514 4693 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/keystone-cron-29331841-z6dhr" event={"ID":"0e12eea6-ac5f-47c8-810b-b304ee039431","Type":"ContainerDied","Data":"58c7975f8fe3f6b7ce625fa2c93a074eaddf84999541aea39b0e5227acc427a2"} Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.790637 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.865902 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-config-data\") pod \"0e12eea6-ac5f-47c8-810b-b304ee039431\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.865979 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-fernet-keys\") pod \"0e12eea6-ac5f-47c8-810b-b304ee039431\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.866166 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-combined-ca-bundle\") pod \"0e12eea6-ac5f-47c8-810b-b304ee039431\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.866316 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dpxt\" (UniqueName: \"kubernetes.io/projected/0e12eea6-ac5f-47c8-810b-b304ee039431-kube-api-access-5dpxt\") pod \"0e12eea6-ac5f-47c8-810b-b304ee039431\" (UID: \"0e12eea6-ac5f-47c8-810b-b304ee039431\") " Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.872647 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e12eea6-ac5f-47c8-810b-b304ee039431-kube-api-access-5dpxt" (OuterVolumeSpecName: "kube-api-access-5dpxt") pod "0e12eea6-ac5f-47c8-810b-b304ee039431" (UID: "0e12eea6-ac5f-47c8-810b-b304ee039431"). InnerVolumeSpecName "kube-api-access-5dpxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.873753 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0e12eea6-ac5f-47c8-810b-b304ee039431" (UID: "0e12eea6-ac5f-47c8-810b-b304ee039431"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.906301 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e12eea6-ac5f-47c8-810b-b304ee039431" (UID: "0e12eea6-ac5f-47c8-810b-b304ee039431"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.940262 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-config-data" (OuterVolumeSpecName: "config-data") pod "0e12eea6-ac5f-47c8-810b-b304ee039431" (UID: "0e12eea6-ac5f-47c8-810b-b304ee039431"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.968500 4693 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.968550 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dpxt\" (UniqueName: \"kubernetes.io/projected/0e12eea6-ac5f-47c8-810b-b304ee039431-kube-api-access-5dpxt\") on node \"crc\" DevicePath \"\"" Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.968570 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 08:01:04 crc kubenswrapper[4693]: I1008 08:01:04.968587 4693 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0e12eea6-ac5f-47c8-810b-b304ee039431-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 08 08:01:05 crc kubenswrapper[4693]: I1008 08:01:05.416170 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29331841-z6dhr" event={"ID":"0e12eea6-ac5f-47c8-810b-b304ee039431","Type":"ContainerDied","Data":"b04601fa3270be482e9b1e5f006d048753cd69ddda6832f513868315f56caf5b"} Oct 08 08:01:05 crc kubenswrapper[4693]: I1008 08:01:05.416618 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b04601fa3270be482e9b1e5f006d048753cd69ddda6832f513868315f56caf5b" Oct 08 08:01:05 crc kubenswrapper[4693]: I1008 08:01:05.416278 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29331841-z6dhr" Oct 08 08:01:23 crc kubenswrapper[4693]: I1008 08:01:23.490365 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:01:23 crc kubenswrapper[4693]: I1008 08:01:23.491037 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:01:23 crc kubenswrapper[4693]: I1008 08:01:23.491104 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 08:01:23 crc kubenswrapper[4693]: I1008 08:01:23.492098 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7f7ddc6a4e37f0f16608812de5d34fc6d614c42d995eb4b20e05072ddcc7127c"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 08 08:01:23 crc kubenswrapper[4693]: I1008 08:01:23.492189 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" 
containerID="cri-o://7f7ddc6a4e37f0f16608812de5d34fc6d614c42d995eb4b20e05072ddcc7127c" gracePeriod=600 Oct 08 08:01:24 crc kubenswrapper[4693]: I1008 08:01:24.617752 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="7f7ddc6a4e37f0f16608812de5d34fc6d614c42d995eb4b20e05072ddcc7127c" exitCode=0 Oct 08 08:01:24 crc kubenswrapper[4693]: I1008 08:01:24.617858 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"7f7ddc6a4e37f0f16608812de5d34fc6d614c42d995eb4b20e05072ddcc7127c"} Oct 08 08:01:24 crc kubenswrapper[4693]: I1008 08:01:24.618393 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988"} Oct 08 08:01:24 crc kubenswrapper[4693]: I1008 08:01:24.618420 4693 scope.go:117] "RemoveContainer" containerID="c52bea0d4fb391aca9330905213d81f211240260f50dc1b2391697c613f9ad9f" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.549927 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6b87x"] Oct 08 08:01:29 crc kubenswrapper[4693]: E1008 08:01:29.551596 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e12eea6-ac5f-47c8-810b-b304ee039431" containerName="keystone-cron" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.551667 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e12eea6-ac5f-47c8-810b-b304ee039431" containerName="keystone-cron" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.551955 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e12eea6-ac5f-47c8-810b-b304ee039431" containerName="keystone-cron" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.553244 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.565634 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6b87x"] Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.680392 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxlr5\" (UniqueName: \"kubernetes.io/projected/90142f0b-e152-479f-b4c1-a3876d6ddd3f-kube-api-access-kxlr5\") pod \"redhat-operators-6b87x\" (UID: \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\") " pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.680606 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90142f0b-e152-479f-b4c1-a3876d6ddd3f-catalog-content\") pod \"redhat-operators-6b87x\" (UID: \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\") " pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.680829 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90142f0b-e152-479f-b4c1-a3876d6ddd3f-utilities\") pod \"redhat-operators-6b87x\" (UID: \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\") " pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.782629 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90142f0b-e152-479f-b4c1-a3876d6ddd3f-utilities\") pod \"redhat-operators-6b87x\" (UID: \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\") " pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.783055 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxlr5\" (UniqueName: \"kubernetes.io/projected/90142f0b-e152-479f-b4c1-a3876d6ddd3f-kube-api-access-kxlr5\") pod \"redhat-operators-6b87x\" (UID: \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\") " pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.783124 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90142f0b-e152-479f-b4c1-a3876d6ddd3f-catalog-content\") pod \"redhat-operators-6b87x\" (UID: \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\") " pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.783415 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90142f0b-e152-479f-b4c1-a3876d6ddd3f-utilities\") pod \"redhat-operators-6b87x\" (UID: \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\") " pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.783615 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90142f0b-e152-479f-b4c1-a3876d6ddd3f-catalog-content\") pod \"redhat-operators-6b87x\" (UID: \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\") " pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.814274 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-kxlr5\" (UniqueName: \"kubernetes.io/projected/90142f0b-e152-479f-b4c1-a3876d6ddd3f-kube-api-access-kxlr5\") pod \"redhat-operators-6b87x\" (UID: \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\") " pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:29 crc kubenswrapper[4693]: I1008 08:01:29.875544 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:30 crc kubenswrapper[4693]: I1008 08:01:30.325923 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6b87x"] Oct 08 08:01:30 crc kubenswrapper[4693]: W1008 08:01:30.338714 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod90142f0b_e152_479f_b4c1_a3876d6ddd3f.slice/crio-1b5b645577226c33007fc59bb17589e882161b657c97cdead0f65e1ef936b205 WatchSource:0}: Error finding container 1b5b645577226c33007fc59bb17589e882161b657c97cdead0f65e1ef936b205: Status 404 returned error can't find the container with id 1b5b645577226c33007fc59bb17589e882161b657c97cdead0f65e1ef936b205 Oct 08 08:01:30 crc kubenswrapper[4693]: I1008 08:01:30.713510 4693 generic.go:334] "Generic (PLEG): container finished" podID="90142f0b-e152-479f-b4c1-a3876d6ddd3f" containerID="9f7ad0c84c62cefecdc97f0ce23d92c11b7c54be488db3596255918ce060a4bb" exitCode=0 Oct 08 08:01:30 crc kubenswrapper[4693]: I1008 08:01:30.713731 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6b87x" event={"ID":"90142f0b-e152-479f-b4c1-a3876d6ddd3f","Type":"ContainerDied","Data":"9f7ad0c84c62cefecdc97f0ce23d92c11b7c54be488db3596255918ce060a4bb"} Oct 08 08:01:30 crc kubenswrapper[4693]: I1008 08:01:30.713758 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6b87x" event={"ID":"90142f0b-e152-479f-b4c1-a3876d6ddd3f","Type":"ContainerStarted","Data":"1b5b645577226c33007fc59bb17589e882161b657c97cdead0f65e1ef936b205"} Oct 08 08:01:30 crc kubenswrapper[4693]: I1008 08:01:30.715607 4693 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 08 08:01:31 crc kubenswrapper[4693]: I1008 08:01:31.726634 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6b87x" event={"ID":"90142f0b-e152-479f-b4c1-a3876d6ddd3f","Type":"ContainerStarted","Data":"76342ff800f02ed470c2a6f959547fb90462340d9ad3c71e3fde58cc31d1047a"} Oct 08 08:01:32 crc kubenswrapper[4693]: I1008 08:01:32.739261 4693 generic.go:334] "Generic (PLEG): container finished" podID="90142f0b-e152-479f-b4c1-a3876d6ddd3f" containerID="76342ff800f02ed470c2a6f959547fb90462340d9ad3c71e3fde58cc31d1047a" exitCode=0 Oct 08 08:01:32 crc kubenswrapper[4693]: I1008 08:01:32.739315 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6b87x" event={"ID":"90142f0b-e152-479f-b4c1-a3876d6ddd3f","Type":"ContainerDied","Data":"76342ff800f02ed470c2a6f959547fb90462340d9ad3c71e3fde58cc31d1047a"} Oct 08 08:01:33 crc kubenswrapper[4693]: I1008 08:01:33.753552 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6b87x" event={"ID":"90142f0b-e152-479f-b4c1-a3876d6ddd3f","Type":"ContainerStarted","Data":"d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418"} Oct 08 08:01:33 crc kubenswrapper[4693]: I1008 08:01:33.798208 4693 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-marketplace/redhat-operators-6b87x" podStartSLOduration=2.362170396 podStartE2EDuration="4.798183561s" podCreationTimestamp="2025-10-08 08:01:29 +0000 UTC" firstStartedPulling="2025-10-08 08:01:30.71541274 +0000 UTC m=+2676.086377675" lastFinishedPulling="2025-10-08 08:01:33.151425865 +0000 UTC m=+2678.522390840" observedRunningTime="2025-10-08 08:01:33.781395651 +0000 UTC m=+2679.152360626" watchObservedRunningTime="2025-10-08 08:01:33.798183561 +0000 UTC m=+2679.169148526" Oct 08 08:01:39 crc kubenswrapper[4693]: I1008 08:01:39.875983 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:39 crc kubenswrapper[4693]: I1008 08:01:39.876621 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:39 crc kubenswrapper[4693]: I1008 08:01:39.950791 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:40 crc kubenswrapper[4693]: I1008 08:01:40.918280 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:41 crc kubenswrapper[4693]: I1008 08:01:41.006043 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6b87x"] Oct 08 08:01:42 crc kubenswrapper[4693]: I1008 08:01:42.863040 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6b87x" podUID="90142f0b-e152-479f-b4c1-a3876d6ddd3f" containerName="registry-server" containerID="cri-o://d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418" gracePeriod=2 Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.524595 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.609494 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90142f0b-e152-479f-b4c1-a3876d6ddd3f-catalog-content\") pod \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\" (UID: \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\") " Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.609707 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90142f0b-e152-479f-b4c1-a3876d6ddd3f-utilities\") pod \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\" (UID: \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\") " Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.609781 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxlr5\" (UniqueName: \"kubernetes.io/projected/90142f0b-e152-479f-b4c1-a3876d6ddd3f-kube-api-access-kxlr5\") pod \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\" (UID: \"90142f0b-e152-479f-b4c1-a3876d6ddd3f\") " Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.613142 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90142f0b-e152-479f-b4c1-a3876d6ddd3f-utilities" (OuterVolumeSpecName: "utilities") pod "90142f0b-e152-479f-b4c1-a3876d6ddd3f" (UID: "90142f0b-e152-479f-b4c1-a3876d6ddd3f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.617177 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90142f0b-e152-479f-b4c1-a3876d6ddd3f-kube-api-access-kxlr5" (OuterVolumeSpecName: "kube-api-access-kxlr5") pod "90142f0b-e152-479f-b4c1-a3876d6ddd3f" (UID: "90142f0b-e152-479f-b4c1-a3876d6ddd3f"). InnerVolumeSpecName "kube-api-access-kxlr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.712392 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90142f0b-e152-479f-b4c1-a3876d6ddd3f-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.712442 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxlr5\" (UniqueName: \"kubernetes.io/projected/90142f0b-e152-479f-b4c1-a3876d6ddd3f-kube-api-access-kxlr5\") on node \"crc\" DevicePath \"\"" Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.714899 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90142f0b-e152-479f-b4c1-a3876d6ddd3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "90142f0b-e152-479f-b4c1-a3876d6ddd3f" (UID: "90142f0b-e152-479f-b4c1-a3876d6ddd3f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.815062 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90142f0b-e152-479f-b4c1-a3876d6ddd3f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.913759 4693 generic.go:334] "Generic (PLEG): container finished" podID="90142f0b-e152-479f-b4c1-a3876d6ddd3f" containerID="d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418" exitCode=0 Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.913804 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6b87x" event={"ID":"90142f0b-e152-479f-b4c1-a3876d6ddd3f","Type":"ContainerDied","Data":"d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418"} Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.913858 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6b87x" event={"ID":"90142f0b-e152-479f-b4c1-a3876d6ddd3f","Type":"ContainerDied","Data":"1b5b645577226c33007fc59bb17589e882161b657c97cdead0f65e1ef936b205"} Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.913879 4693 scope.go:117] "RemoveContainer" containerID="d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418" Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.914037 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6b87x" Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.938740 4693 scope.go:117] "RemoveContainer" containerID="76342ff800f02ed470c2a6f959547fb90462340d9ad3c71e3fde58cc31d1047a" Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.955443 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6b87x"] Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.966165 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6b87x"] Oct 08 08:01:44 crc kubenswrapper[4693]: I1008 08:01:44.984841 4693 scope.go:117] "RemoveContainer" containerID="9f7ad0c84c62cefecdc97f0ce23d92c11b7c54be488db3596255918ce060a4bb" Oct 08 08:01:45 crc kubenswrapper[4693]: I1008 08:01:45.015939 4693 scope.go:117] "RemoveContainer" containerID="d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418" Oct 08 08:01:45 crc kubenswrapper[4693]: E1008 08:01:45.016284 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418\": container with ID starting with d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418 not found: ID does not exist" containerID="d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418" Oct 08 08:01:45 crc kubenswrapper[4693]: I1008 08:01:45.016407 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418"} err="failed to get container status \"d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418\": rpc error: code = NotFound desc = could not find container \"d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418\": container with ID starting with d14f6e21d0aedbd771e1f06a941a7de0b39291344da580cecab7880b6b1e8418 not found: ID does not exist" Oct 08 08:01:45 crc kubenswrapper[4693]: I1008 08:01:45.016502 4693 scope.go:117] "RemoveContainer" containerID="76342ff800f02ed470c2a6f959547fb90462340d9ad3c71e3fde58cc31d1047a" Oct 08 08:01:45 crc kubenswrapper[4693]: E1008 08:01:45.016868 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76342ff800f02ed470c2a6f959547fb90462340d9ad3c71e3fde58cc31d1047a\": container with ID starting with 76342ff800f02ed470c2a6f959547fb90462340d9ad3c71e3fde58cc31d1047a not found: ID does not exist" containerID="76342ff800f02ed470c2a6f959547fb90462340d9ad3c71e3fde58cc31d1047a" Oct 08 08:01:45 crc kubenswrapper[4693]: I1008 08:01:45.016983 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76342ff800f02ed470c2a6f959547fb90462340d9ad3c71e3fde58cc31d1047a"} err="failed to get container status \"76342ff800f02ed470c2a6f959547fb90462340d9ad3c71e3fde58cc31d1047a\": rpc error: code = NotFound desc = could not find container \"76342ff800f02ed470c2a6f959547fb90462340d9ad3c71e3fde58cc31d1047a\": container with ID starting with 76342ff800f02ed470c2a6f959547fb90462340d9ad3c71e3fde58cc31d1047a not found: ID does not exist" Oct 08 08:01:45 crc kubenswrapper[4693]: I1008 08:01:45.017156 4693 scope.go:117] "RemoveContainer" containerID="9f7ad0c84c62cefecdc97f0ce23d92c11b7c54be488db3596255918ce060a4bb" Oct 08 08:01:45 crc kubenswrapper[4693]: E1008 08:01:45.017501 4693 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"9f7ad0c84c62cefecdc97f0ce23d92c11b7c54be488db3596255918ce060a4bb\": container with ID starting with 9f7ad0c84c62cefecdc97f0ce23d92c11b7c54be488db3596255918ce060a4bb not found: ID does not exist" containerID="9f7ad0c84c62cefecdc97f0ce23d92c11b7c54be488db3596255918ce060a4bb" Oct 08 08:01:45 crc kubenswrapper[4693]: I1008 08:01:45.017526 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f7ad0c84c62cefecdc97f0ce23d92c11b7c54be488db3596255918ce060a4bb"} err="failed to get container status \"9f7ad0c84c62cefecdc97f0ce23d92c11b7c54be488db3596255918ce060a4bb\": rpc error: code = NotFound desc = could not find container \"9f7ad0c84c62cefecdc97f0ce23d92c11b7c54be488db3596255918ce060a4bb\": container with ID starting with 9f7ad0c84c62cefecdc97f0ce23d92c11b7c54be488db3596255918ce060a4bb not found: ID does not exist" Oct 08 08:01:45 crc kubenswrapper[4693]: I1008 08:01:45.385867 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90142f0b-e152-479f-b4c1-a3876d6ddd3f" path="/var/lib/kubelet/pods/90142f0b-e152-479f-b4c1-a3876d6ddd3f/volumes" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.780892 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-txwqv"] Oct 08 08:01:46 crc kubenswrapper[4693]: E1008 08:01:46.781675 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90142f0b-e152-479f-b4c1-a3876d6ddd3f" containerName="registry-server" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.781690 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="90142f0b-e152-479f-b4c1-a3876d6ddd3f" containerName="registry-server" Oct 08 08:01:46 crc kubenswrapper[4693]: E1008 08:01:46.781706 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90142f0b-e152-479f-b4c1-a3876d6ddd3f" containerName="extract-content" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.781714 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="90142f0b-e152-479f-b4c1-a3876d6ddd3f" containerName="extract-content" Oct 08 08:01:46 crc kubenswrapper[4693]: E1008 08:01:46.781732 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90142f0b-e152-479f-b4c1-a3876d6ddd3f" containerName="extract-utilities" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.781741 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="90142f0b-e152-479f-b4c1-a3876d6ddd3f" containerName="extract-utilities" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.781985 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="90142f0b-e152-479f-b4c1-a3876d6ddd3f" containerName="registry-server" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.783535 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.808045 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-txwqv"] Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.858764 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-utilities\") pod \"certified-operators-txwqv\" (UID: \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\") " pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.858864 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjq44\" (UniqueName: \"kubernetes.io/projected/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-kube-api-access-jjq44\") pod \"certified-operators-txwqv\" (UID: \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\") " pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.858931 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-catalog-content\") pod \"certified-operators-txwqv\" (UID: \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\") " pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.962198 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-utilities\") pod \"certified-operators-txwqv\" (UID: \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\") " pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.962270 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjq44\" (UniqueName: \"kubernetes.io/projected/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-kube-api-access-jjq44\") pod \"certified-operators-txwqv\" (UID: \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\") " pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.962303 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-catalog-content\") pod \"certified-operators-txwqv\" (UID: \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\") " pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.962923 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-utilities\") pod \"certified-operators-txwqv\" (UID: \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\") " pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.962959 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-catalog-content\") pod \"certified-operators-txwqv\" (UID: \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\") " pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:46 crc kubenswrapper[4693]: I1008 08:01:46.985720 4693 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jjq44\" (UniqueName: \"kubernetes.io/projected/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-kube-api-access-jjq44\") pod \"certified-operators-txwqv\" (UID: \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\") " pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:47 crc kubenswrapper[4693]: I1008 08:01:47.134147 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:47 crc kubenswrapper[4693]: I1008 08:01:47.629015 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-txwqv"] Oct 08 08:01:47 crc kubenswrapper[4693]: I1008 08:01:47.949663 4693 generic.go:334] "Generic (PLEG): container finished" podID="30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" containerID="878a5a145c7583a21019075d5eb8c5c5c3c7d9fd7616c6263bc56ffb00222727" exitCode=0 Oct 08 08:01:47 crc kubenswrapper[4693]: I1008 08:01:47.949704 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-txwqv" event={"ID":"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac","Type":"ContainerDied","Data":"878a5a145c7583a21019075d5eb8c5c5c3c7d9fd7616c6263bc56ffb00222727"} Oct 08 08:01:47 crc kubenswrapper[4693]: I1008 08:01:47.949726 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-txwqv" event={"ID":"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac","Type":"ContainerStarted","Data":"2f776ba25c309909cdc8da534c9bedc2e7d9b90fe6a985f489313be4d7f5ad08"} Oct 08 08:01:49 crc kubenswrapper[4693]: I1008 08:01:49.975864 4693 generic.go:334] "Generic (PLEG): container finished" podID="30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" containerID="3abd806ab455591cb672bafbf0d2c27177541cafc400c76a444b4f12c31febaa" exitCode=0 Oct 08 08:01:49 crc kubenswrapper[4693]: I1008 08:01:49.975936 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-txwqv" event={"ID":"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac","Type":"ContainerDied","Data":"3abd806ab455591cb672bafbf0d2c27177541cafc400c76a444b4f12c31febaa"} Oct 08 08:01:52 crc kubenswrapper[4693]: I1008 08:01:52.000130 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-txwqv" event={"ID":"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac","Type":"ContainerStarted","Data":"1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126"} Oct 08 08:01:52 crc kubenswrapper[4693]: I1008 08:01:52.027671 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-txwqv" podStartSLOduration=2.536031272 podStartE2EDuration="6.027651585s" podCreationTimestamp="2025-10-08 08:01:46 +0000 UTC" firstStartedPulling="2025-10-08 08:01:47.954309731 +0000 UTC m=+2693.325274666" lastFinishedPulling="2025-10-08 08:01:51.445930044 +0000 UTC m=+2696.816894979" observedRunningTime="2025-10-08 08:01:52.022752927 +0000 UTC m=+2697.393717872" watchObservedRunningTime="2025-10-08 08:01:52.027651585 +0000 UTC m=+2697.398616520" Oct 08 08:01:57 crc kubenswrapper[4693]: I1008 08:01:57.135099 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:57 crc kubenswrapper[4693]: I1008 08:01:57.135714 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:57 crc kubenswrapper[4693]: I1008 08:01:57.222631 4693 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:58 crc kubenswrapper[4693]: I1008 08:01:58.168373 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:01:58 crc kubenswrapper[4693]: I1008 08:01:58.283528 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-txwqv"] Oct 08 08:02:00 crc kubenswrapper[4693]: I1008 08:02:00.137843 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-txwqv" podUID="30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" containerName="registry-server" containerID="cri-o://1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126" gracePeriod=2 Oct 08 08:02:00 crc kubenswrapper[4693]: I1008 08:02:00.642174 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:02:00 crc kubenswrapper[4693]: I1008 08:02:00.774876 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-utilities\") pod \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\" (UID: \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\") " Oct 08 08:02:00 crc kubenswrapper[4693]: I1008 08:02:00.775010 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-catalog-content\") pod \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\" (UID: \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\") " Oct 08 08:02:00 crc kubenswrapper[4693]: I1008 08:02:00.775114 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjq44\" (UniqueName: \"kubernetes.io/projected/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-kube-api-access-jjq44\") pod \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\" (UID: \"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac\") " Oct 08 08:02:00 crc kubenswrapper[4693]: I1008 08:02:00.776092 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-utilities" (OuterVolumeSpecName: "utilities") pod "30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" (UID: "30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:02:00 crc kubenswrapper[4693]: I1008 08:02:00.780924 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-kube-api-access-jjq44" (OuterVolumeSpecName: "kube-api-access-jjq44") pod "30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" (UID: "30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac"). InnerVolumeSpecName "kube-api-access-jjq44". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:02:00 crc kubenswrapper[4693]: I1008 08:02:00.842613 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" (UID: "30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:02:00 crc kubenswrapper[4693]: I1008 08:02:00.878121 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 08:02:00 crc kubenswrapper[4693]: I1008 08:02:00.878165 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjq44\" (UniqueName: \"kubernetes.io/projected/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-kube-api-access-jjq44\") on node \"crc\" DevicePath \"\"" Oct 08 08:02:00 crc kubenswrapper[4693]: I1008 08:02:00.878181 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.162098 4693 generic.go:334] "Generic (PLEG): container finished" podID="30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" containerID="1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126" exitCode=0 Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.162173 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-txwqv" event={"ID":"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac","Type":"ContainerDied","Data":"1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126"} Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.162220 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-txwqv" event={"ID":"30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac","Type":"ContainerDied","Data":"2f776ba25c309909cdc8da534c9bedc2e7d9b90fe6a985f489313be4d7f5ad08"} Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.162250 4693 scope.go:117] "RemoveContainer" containerID="1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126" Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.162338 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-txwqv" Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.206669 4693 scope.go:117] "RemoveContainer" containerID="3abd806ab455591cb672bafbf0d2c27177541cafc400c76a444b4f12c31febaa" Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.235478 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-txwqv"] Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.241326 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-txwqv"] Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.264480 4693 scope.go:117] "RemoveContainer" containerID="878a5a145c7583a21019075d5eb8c5c5c3c7d9fd7616c6263bc56ffb00222727" Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.320632 4693 scope.go:117] "RemoveContainer" containerID="1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126" Oct 08 08:02:01 crc kubenswrapper[4693]: E1008 08:02:01.322141 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126\": container with ID starting with 1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126 not found: ID does not exist" containerID="1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126" Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.322196 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126"} err="failed to get container status \"1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126\": rpc error: code = NotFound desc = could not find container \"1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126\": container with ID starting with 1383dbdfcb14349d7487b217602fcefbef63698714941fcea4ebebd15a291126 not found: ID does not exist" Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.322230 4693 scope.go:117] "RemoveContainer" containerID="3abd806ab455591cb672bafbf0d2c27177541cafc400c76a444b4f12c31febaa" Oct 08 08:02:01 crc kubenswrapper[4693]: E1008 08:02:01.323251 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3abd806ab455591cb672bafbf0d2c27177541cafc400c76a444b4f12c31febaa\": container with ID starting with 3abd806ab455591cb672bafbf0d2c27177541cafc400c76a444b4f12c31febaa not found: ID does not exist" containerID="3abd806ab455591cb672bafbf0d2c27177541cafc400c76a444b4f12c31febaa" Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.323352 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3abd806ab455591cb672bafbf0d2c27177541cafc400c76a444b4f12c31febaa"} err="failed to get container status \"3abd806ab455591cb672bafbf0d2c27177541cafc400c76a444b4f12c31febaa\": rpc error: code = NotFound desc = could not find container \"3abd806ab455591cb672bafbf0d2c27177541cafc400c76a444b4f12c31febaa\": container with ID starting with 3abd806ab455591cb672bafbf0d2c27177541cafc400c76a444b4f12c31febaa not found: ID does not exist" Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.323617 4693 scope.go:117] "RemoveContainer" containerID="878a5a145c7583a21019075d5eb8c5c5c3c7d9fd7616c6263bc56ffb00222727" Oct 08 08:02:01 crc kubenswrapper[4693]: E1008 08:02:01.324191 4693 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"878a5a145c7583a21019075d5eb8c5c5c3c7d9fd7616c6263bc56ffb00222727\": container with ID starting with 878a5a145c7583a21019075d5eb8c5c5c3c7d9fd7616c6263bc56ffb00222727 not found: ID does not exist" containerID="878a5a145c7583a21019075d5eb8c5c5c3c7d9fd7616c6263bc56ffb00222727" Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.324226 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"878a5a145c7583a21019075d5eb8c5c5c3c7d9fd7616c6263bc56ffb00222727"} err="failed to get container status \"878a5a145c7583a21019075d5eb8c5c5c3c7d9fd7616c6263bc56ffb00222727\": rpc error: code = NotFound desc = could not find container \"878a5a145c7583a21019075d5eb8c5c5c3c7d9fd7616c6263bc56ffb00222727\": container with ID starting with 878a5a145c7583a21019075d5eb8c5c5c3c7d9fd7616c6263bc56ffb00222727 not found: ID does not exist" Oct 08 08:02:01 crc kubenswrapper[4693]: I1008 08:02:01.387646 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" path="/var/lib/kubelet/pods/30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac/volumes" Oct 08 08:02:53 crc kubenswrapper[4693]: I1008 08:02:53.783011 4693 generic.go:334] "Generic (PLEG): container finished" podID="be7009a4-69bd-41cc-8fe8-02e5d79db395" containerID="7e12ab150618e8ee37c6e824e9bc0e6e022a9b56af47fa39491fcdc50af7a146" exitCode=0 Oct 08 08:02:53 crc kubenswrapper[4693]: I1008 08:02:53.783675 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" event={"ID":"be7009a4-69bd-41cc-8fe8-02e5d79db395","Type":"ContainerDied","Data":"7e12ab150618e8ee37c6e824e9bc0e6e022a9b56af47fa39491fcdc50af7a146"} Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.219793 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.360105 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-1\") pod \"be7009a4-69bd-41cc-8fe8-02e5d79db395\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.360205 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-inventory\") pod \"be7009a4-69bd-41cc-8fe8-02e5d79db395\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.360278 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-2\") pod \"be7009a4-69bd-41cc-8fe8-02e5d79db395\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.360334 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-0\") pod \"be7009a4-69bd-41cc-8fe8-02e5d79db395\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.360452 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bgb8\" (UniqueName: \"kubernetes.io/projected/be7009a4-69bd-41cc-8fe8-02e5d79db395-kube-api-access-8bgb8\") pod \"be7009a4-69bd-41cc-8fe8-02e5d79db395\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.360513 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-telemetry-combined-ca-bundle\") pod \"be7009a4-69bd-41cc-8fe8-02e5d79db395\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.360613 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ssh-key\") pod \"be7009a4-69bd-41cc-8fe8-02e5d79db395\" (UID: \"be7009a4-69bd-41cc-8fe8-02e5d79db395\") " Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.376106 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "be7009a4-69bd-41cc-8fe8-02e5d79db395" (UID: "be7009a4-69bd-41cc-8fe8-02e5d79db395"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.376109 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be7009a4-69bd-41cc-8fe8-02e5d79db395-kube-api-access-8bgb8" (OuterVolumeSpecName: "kube-api-access-8bgb8") pod "be7009a4-69bd-41cc-8fe8-02e5d79db395" (UID: "be7009a4-69bd-41cc-8fe8-02e5d79db395"). 
InnerVolumeSpecName "kube-api-access-8bgb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.405000 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-inventory" (OuterVolumeSpecName: "inventory") pod "be7009a4-69bd-41cc-8fe8-02e5d79db395" (UID: "be7009a4-69bd-41cc-8fe8-02e5d79db395"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.412208 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "be7009a4-69bd-41cc-8fe8-02e5d79db395" (UID: "be7009a4-69bd-41cc-8fe8-02e5d79db395"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.419419 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "be7009a4-69bd-41cc-8fe8-02e5d79db395" (UID: "be7009a4-69bd-41cc-8fe8-02e5d79db395"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.428352 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "be7009a4-69bd-41cc-8fe8-02e5d79db395" (UID: "be7009a4-69bd-41cc-8fe8-02e5d79db395"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.430287 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "be7009a4-69bd-41cc-8fe8-02e5d79db395" (UID: "be7009a4-69bd-41cc-8fe8-02e5d79db395"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.462824 4693 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.462855 4693 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.462866 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bgb8\" (UniqueName: \"kubernetes.io/projected/be7009a4-69bd-41cc-8fe8-02e5d79db395-kube-api-access-8bgb8\") on node \"crc\" DevicePath \"\"" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.462875 4693 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.462885 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.462895 4693 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.462906 4693 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be7009a4-69bd-41cc-8fe8-02e5d79db395-inventory\") on node \"crc\" DevicePath \"\"" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.802897 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" event={"ID":"be7009a4-69bd-41cc-8fe8-02e5d79db395","Type":"ContainerDied","Data":"0d1e4c44cb35d8b8405c359ef8dc57015bc2f7e35e1f02c43052befba7025d46"} Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.802936 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d1e4c44cb35d8b8405c359ef8dc57015bc2f7e35e1f02c43052befba7025d46" Oct 08 08:02:55 crc kubenswrapper[4693]: I1008 08:02:55.802974 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh" Oct 08 08:03:23 crc kubenswrapper[4693]: I1008 08:03:23.489365 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:03:23 crc kubenswrapper[4693]: I1008 08:03:23.490129 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.135224 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f8mtp"] Oct 08 08:03:38 crc kubenswrapper[4693]: E1008 08:03:38.136300 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" containerName="registry-server" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.136317 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" containerName="registry-server" Oct 08 08:03:38 crc kubenswrapper[4693]: E1008 08:03:38.136332 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" containerName="extract-utilities" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.136339 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" containerName="extract-utilities" Oct 08 08:03:38 crc kubenswrapper[4693]: E1008 08:03:38.136358 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" containerName="extract-content" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.136367 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" containerName="extract-content" Oct 08 08:03:38 crc kubenswrapper[4693]: E1008 08:03:38.136380 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be7009a4-69bd-41cc-8fe8-02e5d79db395" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.136389 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="be7009a4-69bd-41cc-8fe8-02e5d79db395" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.136636 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="be7009a4-69bd-41cc-8fe8-02e5d79db395" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.136654 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="30e110ea-33cf-447a-bfb7-0dc6e1b3d8ac" containerName="registry-server" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.138286 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.158061 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f8mtp"] Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.284946 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15337f12-d635-4d8e-af9b-632078ed421a-utilities\") pod \"community-operators-f8mtp\" (UID: \"15337f12-d635-4d8e-af9b-632078ed421a\") " pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.285478 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15337f12-d635-4d8e-af9b-632078ed421a-catalog-content\") pod \"community-operators-f8mtp\" (UID: \"15337f12-d635-4d8e-af9b-632078ed421a\") " pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.285706 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2457g\" (UniqueName: \"kubernetes.io/projected/15337f12-d635-4d8e-af9b-632078ed421a-kube-api-access-2457g\") pod \"community-operators-f8mtp\" (UID: \"15337f12-d635-4d8e-af9b-632078ed421a\") " pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.387824 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15337f12-d635-4d8e-af9b-632078ed421a-utilities\") pod \"community-operators-f8mtp\" (UID: \"15337f12-d635-4d8e-af9b-632078ed421a\") " pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.387887 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15337f12-d635-4d8e-af9b-632078ed421a-catalog-content\") pod \"community-operators-f8mtp\" (UID: \"15337f12-d635-4d8e-af9b-632078ed421a\") " pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.387925 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2457g\" (UniqueName: \"kubernetes.io/projected/15337f12-d635-4d8e-af9b-632078ed421a-kube-api-access-2457g\") pod \"community-operators-f8mtp\" (UID: \"15337f12-d635-4d8e-af9b-632078ed421a\") " pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.388927 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15337f12-d635-4d8e-af9b-632078ed421a-utilities\") pod \"community-operators-f8mtp\" (UID: \"15337f12-d635-4d8e-af9b-632078ed421a\") " pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.389037 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15337f12-d635-4d8e-af9b-632078ed421a-catalog-content\") pod \"community-operators-f8mtp\" (UID: \"15337f12-d635-4d8e-af9b-632078ed421a\") " pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.413371 4693 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2457g\" (UniqueName: \"kubernetes.io/projected/15337f12-d635-4d8e-af9b-632078ed421a-kube-api-access-2457g\") pod \"community-operators-f8mtp\" (UID: \"15337f12-d635-4d8e-af9b-632078ed421a\") " pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.495315 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:38 crc kubenswrapper[4693]: I1008 08:03:38.996641 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f8mtp"] Oct 08 08:03:39 crc kubenswrapper[4693]: I1008 08:03:39.311858 4693 generic.go:334] "Generic (PLEG): container finished" podID="15337f12-d635-4d8e-af9b-632078ed421a" containerID="2cdb267080263e425b88a85a97c241ea8ae98295098b303e324847ef0793c950" exitCode=0 Oct 08 08:03:39 crc kubenswrapper[4693]: I1008 08:03:39.311964 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8mtp" event={"ID":"15337f12-d635-4d8e-af9b-632078ed421a","Type":"ContainerDied","Data":"2cdb267080263e425b88a85a97c241ea8ae98295098b303e324847ef0793c950"} Oct 08 08:03:39 crc kubenswrapper[4693]: I1008 08:03:39.312147 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8mtp" event={"ID":"15337f12-d635-4d8e-af9b-632078ed421a","Type":"ContainerStarted","Data":"483b3b487d4e9b21342c224a6cf2b83d694ff843f801070025bb257cb0943719"} Oct 08 08:03:41 crc kubenswrapper[4693]: I1008 08:03:41.333906 4693 generic.go:334] "Generic (PLEG): container finished" podID="15337f12-d635-4d8e-af9b-632078ed421a" containerID="499d26b8d57a7ce0d3feeceb7f0cdb4edfd055e9f191b70bcd6d2cdc14d89313" exitCode=0 Oct 08 08:03:41 crc kubenswrapper[4693]: I1008 08:03:41.334005 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8mtp" event={"ID":"15337f12-d635-4d8e-af9b-632078ed421a","Type":"ContainerDied","Data":"499d26b8d57a7ce0d3feeceb7f0cdb4edfd055e9f191b70bcd6d2cdc14d89313"} Oct 08 08:03:42 crc kubenswrapper[4693]: I1008 08:03:42.372120 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8mtp" event={"ID":"15337f12-d635-4d8e-af9b-632078ed421a","Type":"ContainerStarted","Data":"b8d6e053663490944fbb924ba10808db814d0905fac2eec77c7c56df42fa2dad"} Oct 08 08:03:42 crc kubenswrapper[4693]: I1008 08:03:42.431140 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f8mtp" podStartSLOduration=1.897680914 podStartE2EDuration="4.431119291s" podCreationTimestamp="2025-10-08 08:03:38 +0000 UTC" firstStartedPulling="2025-10-08 08:03:39.315867539 +0000 UTC m=+2804.686832514" lastFinishedPulling="2025-10-08 08:03:41.849305956 +0000 UTC m=+2807.220270891" observedRunningTime="2025-10-08 08:03:42.418173961 +0000 UTC m=+2807.789138896" watchObservedRunningTime="2025-10-08 08:03:42.431119291 +0000 UTC m=+2807.802084226" Oct 08 08:03:48 crc kubenswrapper[4693]: I1008 08:03:48.497348 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:48 crc kubenswrapper[4693]: I1008 08:03:48.498336 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:48 crc kubenswrapper[4693]: I1008 08:03:48.581408 4693 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.206147 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.209015 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.211676 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.211726 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mpx49" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.214563 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.214642 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.237733 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.342671 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/125016ff-a340-49c8-8c6f-9eed2093e1af-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.342763 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.342923 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/125016ff-a340-49c8-8c6f-9eed2093e1af-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.343058 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.343145 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/125016ff-a340-49c8-8c6f-9eed2093e1af-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.343192 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb9pq\" (UniqueName: 
\"kubernetes.io/projected/125016ff-a340-49c8-8c6f-9eed2093e1af-kube-api-access-hb9pq\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.343238 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.343291 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/125016ff-a340-49c8-8c6f-9eed2093e1af-config-data\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.343365 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.445990 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.446554 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/125016ff-a340-49c8-8c6f-9eed2093e1af-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.446611 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb9pq\" (UniqueName: \"kubernetes.io/projected/125016ff-a340-49c8-8c6f-9eed2093e1af-kube-api-access-hb9pq\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.446662 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.446737 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/125016ff-a340-49c8-8c6f-9eed2093e1af-config-data\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.446799 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-openstack-config-secret\") pod 
\"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.446964 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/125016ff-a340-49c8-8c6f-9eed2093e1af-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.447008 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.447119 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/125016ff-a340-49c8-8c6f-9eed2093e1af-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.447297 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/125016ff-a340-49c8-8c6f-9eed2093e1af-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.447846 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/125016ff-a340-49c8-8c6f-9eed2093e1af-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.448500 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/125016ff-a340-49c8-8c6f-9eed2093e1af-config-data\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.448548 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.448606 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/125016ff-a340-49c8-8c6f-9eed2093e1af-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.455231 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " 
pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.456996 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.462027 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.467889 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb9pq\" (UniqueName: \"kubernetes.io/projected/125016ff-a340-49c8-8c6f-9eed2093e1af-kube-api-access-hb9pq\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.498548 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.538874 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.555053 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:49 crc kubenswrapper[4693]: I1008 08:03:49.626871 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f8mtp"] Oct 08 08:03:50 crc kubenswrapper[4693]: I1008 08:03:50.003205 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 08 08:03:50 crc kubenswrapper[4693]: I1008 08:03:50.502372 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"125016ff-a340-49c8-8c6f-9eed2093e1af","Type":"ContainerStarted","Data":"2fd3b4acfc02b790eb1306de225c05f442235f7485a858e3312ca5597dc63c33"} Oct 08 08:03:51 crc kubenswrapper[4693]: I1008 08:03:51.518058 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-f8mtp" podUID="15337f12-d635-4d8e-af9b-632078ed421a" containerName="registry-server" containerID="cri-o://b8d6e053663490944fbb924ba10808db814d0905fac2eec77c7c56df42fa2dad" gracePeriod=2 Oct 08 08:03:52 crc kubenswrapper[4693]: I1008 08:03:52.528477 4693 generic.go:334] "Generic (PLEG): container finished" podID="15337f12-d635-4d8e-af9b-632078ed421a" containerID="b8d6e053663490944fbb924ba10808db814d0905fac2eec77c7c56df42fa2dad" exitCode=0 Oct 08 08:03:52 crc kubenswrapper[4693]: I1008 08:03:52.528683 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8mtp" event={"ID":"15337f12-d635-4d8e-af9b-632078ed421a","Type":"ContainerDied","Data":"b8d6e053663490944fbb924ba10808db814d0905fac2eec77c7c56df42fa2dad"} Oct 08 08:03:53 crc kubenswrapper[4693]: I1008 08:03:53.489644 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:03:53 crc kubenswrapper[4693]: I1008 08:03:53.490008 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:03:55 crc kubenswrapper[4693]: I1008 08:03:55.555590 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8mtp" event={"ID":"15337f12-d635-4d8e-af9b-632078ed421a","Type":"ContainerDied","Data":"483b3b487d4e9b21342c224a6cf2b83d694ff843f801070025bb257cb0943719"} Oct 08 08:03:55 crc kubenswrapper[4693]: I1008 08:03:55.556099 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="483b3b487d4e9b21342c224a6cf2b83d694ff843f801070025bb257cb0943719" Oct 08 08:03:55 crc kubenswrapper[4693]: I1008 08:03:55.639583 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:55 crc kubenswrapper[4693]: I1008 08:03:55.783276 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15337f12-d635-4d8e-af9b-632078ed421a-catalog-content\") pod \"15337f12-d635-4d8e-af9b-632078ed421a\" (UID: \"15337f12-d635-4d8e-af9b-632078ed421a\") " Oct 08 08:03:55 crc kubenswrapper[4693]: I1008 08:03:55.783342 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2457g\" (UniqueName: \"kubernetes.io/projected/15337f12-d635-4d8e-af9b-632078ed421a-kube-api-access-2457g\") pod \"15337f12-d635-4d8e-af9b-632078ed421a\" (UID: \"15337f12-d635-4d8e-af9b-632078ed421a\") " Oct 08 08:03:55 crc kubenswrapper[4693]: I1008 08:03:55.783405 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15337f12-d635-4d8e-af9b-632078ed421a-utilities\") pod \"15337f12-d635-4d8e-af9b-632078ed421a\" (UID: \"15337f12-d635-4d8e-af9b-632078ed421a\") " Oct 08 08:03:55 crc kubenswrapper[4693]: I1008 08:03:55.784429 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15337f12-d635-4d8e-af9b-632078ed421a-utilities" (OuterVolumeSpecName: "utilities") pod "15337f12-d635-4d8e-af9b-632078ed421a" (UID: "15337f12-d635-4d8e-af9b-632078ed421a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:03:55 crc kubenswrapper[4693]: I1008 08:03:55.793026 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15337f12-d635-4d8e-af9b-632078ed421a-kube-api-access-2457g" (OuterVolumeSpecName: "kube-api-access-2457g") pod "15337f12-d635-4d8e-af9b-632078ed421a" (UID: "15337f12-d635-4d8e-af9b-632078ed421a"). InnerVolumeSpecName "kube-api-access-2457g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:03:55 crc kubenswrapper[4693]: I1008 08:03:55.836040 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15337f12-d635-4d8e-af9b-632078ed421a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "15337f12-d635-4d8e-af9b-632078ed421a" (UID: "15337f12-d635-4d8e-af9b-632078ed421a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:03:55 crc kubenswrapper[4693]: I1008 08:03:55.885653 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15337f12-d635-4d8e-af9b-632078ed421a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 08:03:55 crc kubenswrapper[4693]: I1008 08:03:55.885702 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2457g\" (UniqueName: \"kubernetes.io/projected/15337f12-d635-4d8e-af9b-632078ed421a-kube-api-access-2457g\") on node \"crc\" DevicePath \"\"" Oct 08 08:03:55 crc kubenswrapper[4693]: I1008 08:03:55.885722 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15337f12-d635-4d8e-af9b-632078ed421a-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 08:03:56 crc kubenswrapper[4693]: I1008 08:03:56.566287 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f8mtp" Oct 08 08:03:56 crc kubenswrapper[4693]: I1008 08:03:56.609051 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f8mtp"] Oct 08 08:03:56 crc kubenswrapper[4693]: I1008 08:03:56.625405 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f8mtp"] Oct 08 08:03:57 crc kubenswrapper[4693]: I1008 08:03:57.379707 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15337f12-d635-4d8e-af9b-632078ed421a" path="/var/lib/kubelet/pods/15337f12-d635-4d8e-af9b-632078ed421a/volumes" Oct 08 08:04:18 crc kubenswrapper[4693]: E1008 08:04:18.083156 4693 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Oct 08 08:04:18 crc kubenswrapper[4693]: E1008 08:04:18.084063 4693 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hb9pq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(125016ff-a340-49c8-8c6f-9eed2093e1af): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 08 08:04:18 crc kubenswrapper[4693]: E1008 08:04:18.085209 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="125016ff-a340-49c8-8c6f-9eed2093e1af" Oct 08 08:04:18 crc kubenswrapper[4693]: E1008 08:04:18.796594 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="125016ff-a340-49c8-8c6f-9eed2093e1af" Oct 08 08:04:23 crc kubenswrapper[4693]: I1008 08:04:23.489673 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:04:23 crc kubenswrapper[4693]: I1008 08:04:23.490723 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:04:23 crc kubenswrapper[4693]: I1008 08:04:23.492180 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 08:04:23 crc kubenswrapper[4693]: I1008 08:04:23.493273 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 08 08:04:23 crc kubenswrapper[4693]: I1008 08:04:23.493383 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" gracePeriod=600 Oct 08 08:04:23 crc kubenswrapper[4693]: E1008 08:04:23.625979 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:04:23 crc kubenswrapper[4693]: I1008 08:04:23.851888 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" exitCode=0 Oct 08 08:04:23 crc kubenswrapper[4693]: I1008 08:04:23.852667 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988"} Oct 08 08:04:23 crc kubenswrapper[4693]: I1008 08:04:23.852925 4693 scope.go:117] "RemoveContainer" containerID="7f7ddc6a4e37f0f16608812de5d34fc6d614c42d995eb4b20e05072ddcc7127c" Oct 08 08:04:23 crc kubenswrapper[4693]: I1008 
08:04:23.854314 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:04:23 crc kubenswrapper[4693]: E1008 08:04:23.855181 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:04:33 crc kubenswrapper[4693]: I1008 08:04:33.867921 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 08 08:04:34 crc kubenswrapper[4693]: I1008 08:04:34.363725 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:04:34 crc kubenswrapper[4693]: E1008 08:04:34.364123 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:04:35 crc kubenswrapper[4693]: I1008 08:04:35.985809 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"125016ff-a340-49c8-8c6f-9eed2093e1af","Type":"ContainerStarted","Data":"fc2e2ef8b6be8325119cd78ac696438300077e42d809d9b2fe050f34b65695d9"} Oct 08 08:04:47 crc kubenswrapper[4693]: I1008 08:04:47.366270 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:04:47 crc kubenswrapper[4693]: E1008 08:04:47.367465 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:05:01 crc kubenswrapper[4693]: I1008 08:05:01.362987 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:05:01 crc kubenswrapper[4693]: E1008 08:05:01.363849 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:05:16 crc kubenswrapper[4693]: I1008 08:05:16.363332 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:05:16 crc kubenswrapper[4693]: E1008 08:05:16.364753 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:05:27 crc kubenswrapper[4693]: I1008 08:05:27.365131 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:05:27 crc kubenswrapper[4693]: E1008 08:05:27.365986 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:05:42 crc kubenswrapper[4693]: I1008 08:05:42.363693 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:05:42 crc kubenswrapper[4693]: E1008 08:05:42.364516 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:05:57 crc kubenswrapper[4693]: I1008 08:05:57.363933 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:05:57 crc kubenswrapper[4693]: E1008 08:05:57.364958 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:06:10 crc kubenswrapper[4693]: I1008 08:06:10.363637 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:06:10 crc kubenswrapper[4693]: E1008 08:06:10.364326 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:06:21 crc kubenswrapper[4693]: I1008 08:06:21.364121 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:06:21 crc kubenswrapper[4693]: E1008 08:06:21.366377 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:06:35 crc kubenswrapper[4693]: I1008 08:06:35.377998 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:06:35 crc kubenswrapper[4693]: E1008 08:06:35.379525 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:06:48 crc kubenswrapper[4693]: I1008 08:06:48.363871 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:06:48 crc kubenswrapper[4693]: E1008 08:06:48.364593 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:07:02 crc kubenswrapper[4693]: I1008 08:07:02.364017 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:07:02 crc kubenswrapper[4693]: E1008 08:07:02.366380 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:07:16 crc kubenswrapper[4693]: I1008 08:07:16.363480 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:07:16 crc kubenswrapper[4693]: E1008 08:07:16.365168 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:07:30 crc kubenswrapper[4693]: I1008 08:07:30.363131 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:07:30 crc kubenswrapper[4693]: E1008 08:07:30.364187 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:07:45 crc kubenswrapper[4693]: I1008 08:07:45.374549 4693 
scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:07:45 crc kubenswrapper[4693]: E1008 08:07:45.375850 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:08:00 crc kubenswrapper[4693]: I1008 08:08:00.362918 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:08:00 crc kubenswrapper[4693]: E1008 08:08:00.363887 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:08:13 crc kubenswrapper[4693]: I1008 08:08:13.363264 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:08:13 crc kubenswrapper[4693]: E1008 08:08:13.364023 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:08:24 crc kubenswrapper[4693]: I1008 08:08:24.363976 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:08:24 crc kubenswrapper[4693]: E1008 08:08:24.365086 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:08:37 crc kubenswrapper[4693]: I1008 08:08:37.363738 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:08:37 crc kubenswrapper[4693]: E1008 08:08:37.364921 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:08:48 crc kubenswrapper[4693]: I1008 08:08:48.363074 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:08:48 crc kubenswrapper[4693]: E1008 08:08:48.363752 4693 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:08:59 crc kubenswrapper[4693]: I1008 08:08:59.363378 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:08:59 crc kubenswrapper[4693]: E1008 08:08:59.364281 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:09:12 crc kubenswrapper[4693]: I1008 08:09:12.364368 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:09:12 crc kubenswrapper[4693]: E1008 08:09:12.365516 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:09:26 crc kubenswrapper[4693]: I1008 08:09:26.363612 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:09:27 crc kubenswrapper[4693]: I1008 08:09:27.293915 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"6c7bb5f0ad5db721978259349bf27e3d7356eacafd118f6db51f957705571e0c"} Oct 08 08:09:27 crc kubenswrapper[4693]: I1008 08:09:27.322728 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=295.475654066 podStartE2EDuration="5m39.322705908s" podCreationTimestamp="2025-10-08 08:03:48 +0000 UTC" firstStartedPulling="2025-10-08 08:03:50.017733726 +0000 UTC m=+2815.388698701" lastFinishedPulling="2025-10-08 08:04:33.864785598 +0000 UTC m=+2859.235750543" observedRunningTime="2025-10-08 08:04:36.008623108 +0000 UTC m=+2861.379588043" watchObservedRunningTime="2025-10-08 08:09:27.322705908 +0000 UTC m=+3152.693670843" Oct 08 08:10:19 crc kubenswrapper[4693]: I1008 08:10:19.633969 4693 scope.go:117] "RemoveContainer" containerID="b8d6e053663490944fbb924ba10808db814d0905fac2eec77c7c56df42fa2dad" Oct 08 08:10:19 crc kubenswrapper[4693]: I1008 08:10:19.664479 4693 scope.go:117] "RemoveContainer" containerID="2cdb267080263e425b88a85a97c241ea8ae98295098b303e324847ef0793c950" Oct 08 08:10:19 crc kubenswrapper[4693]: I1008 08:10:19.684998 4693 scope.go:117] "RemoveContainer" containerID="499d26b8d57a7ce0d3feeceb7f0cdb4edfd055e9f191b70bcd6d2cdc14d89313" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.699675 4693 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-marketplace/redhat-marketplace-42p6p"] Oct 08 08:10:46 crc kubenswrapper[4693]: E1008 08:10:46.701242 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15337f12-d635-4d8e-af9b-632078ed421a" containerName="extract-utilities" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.701266 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="15337f12-d635-4d8e-af9b-632078ed421a" containerName="extract-utilities" Oct 08 08:10:46 crc kubenswrapper[4693]: E1008 08:10:46.701283 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15337f12-d635-4d8e-af9b-632078ed421a" containerName="registry-server" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.701293 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="15337f12-d635-4d8e-af9b-632078ed421a" containerName="registry-server" Oct 08 08:10:46 crc kubenswrapper[4693]: E1008 08:10:46.701346 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15337f12-d635-4d8e-af9b-632078ed421a" containerName="extract-content" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.701359 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="15337f12-d635-4d8e-af9b-632078ed421a" containerName="extract-content" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.701679 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="15337f12-d635-4d8e-af9b-632078ed421a" containerName="registry-server" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.718493 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-42p6p"] Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.718637 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.804738 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njnxb\" (UniqueName: \"kubernetes.io/projected/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-kube-api-access-njnxb\") pod \"redhat-marketplace-42p6p\" (UID: \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\") " pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.805275 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-utilities\") pod \"redhat-marketplace-42p6p\" (UID: \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\") " pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.805345 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-catalog-content\") pod \"redhat-marketplace-42p6p\" (UID: \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\") " pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.907316 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-catalog-content\") pod \"redhat-marketplace-42p6p\" (UID: \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\") " pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.907466 4693 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njnxb\" (UniqueName: \"kubernetes.io/projected/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-kube-api-access-njnxb\") pod \"redhat-marketplace-42p6p\" (UID: \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\") " pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.907558 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-utilities\") pod \"redhat-marketplace-42p6p\" (UID: \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\") " pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.908007 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-utilities\") pod \"redhat-marketplace-42p6p\" (UID: \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\") " pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.908007 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-catalog-content\") pod \"redhat-marketplace-42p6p\" (UID: \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\") " pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:46 crc kubenswrapper[4693]: I1008 08:10:46.945217 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njnxb\" (UniqueName: \"kubernetes.io/projected/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-kube-api-access-njnxb\") pod \"redhat-marketplace-42p6p\" (UID: \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\") " pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:47 crc kubenswrapper[4693]: I1008 08:10:47.051663 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:47 crc kubenswrapper[4693]: I1008 08:10:47.558549 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-42p6p"] Oct 08 08:10:48 crc kubenswrapper[4693]: I1008 08:10:48.215125 4693 generic.go:334] "Generic (PLEG): container finished" podID="6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" containerID="34ac80e8173f9b35ad6fdf17195354f2324f7df4df10a114ac38d6383d7209a6" exitCode=0 Oct 08 08:10:48 crc kubenswrapper[4693]: I1008 08:10:48.215174 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42p6p" event={"ID":"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050","Type":"ContainerDied","Data":"34ac80e8173f9b35ad6fdf17195354f2324f7df4df10a114ac38d6383d7209a6"} Oct 08 08:10:48 crc kubenswrapper[4693]: I1008 08:10:48.215400 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42p6p" event={"ID":"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050","Type":"ContainerStarted","Data":"1ca7e7ee6f1ead4ac51160a6b3f9a7d01094b505d8f6e844a9941205b46ae6fa"} Oct 08 08:10:48 crc kubenswrapper[4693]: I1008 08:10:48.217473 4693 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 08 08:10:49 crc kubenswrapper[4693]: I1008 08:10:49.229951 4693 generic.go:334] "Generic (PLEG): container finished" podID="6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" containerID="9a8948a07e35896d28836d8201fe6cdada2c48e4b9ea04ccd466d6f313469614" exitCode=0 Oct 08 08:10:49 crc kubenswrapper[4693]: I1008 08:10:49.230049 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42p6p" event={"ID":"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050","Type":"ContainerDied","Data":"9a8948a07e35896d28836d8201fe6cdada2c48e4b9ea04ccd466d6f313469614"} Oct 08 08:10:50 crc kubenswrapper[4693]: I1008 08:10:50.243120 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42p6p" event={"ID":"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050","Type":"ContainerStarted","Data":"21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722"} Oct 08 08:10:50 crc kubenswrapper[4693]: I1008 08:10:50.263790 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-42p6p" podStartSLOduration=2.808071713 podStartE2EDuration="4.263773645s" podCreationTimestamp="2025-10-08 08:10:46 +0000 UTC" firstStartedPulling="2025-10-08 08:10:48.217180412 +0000 UTC m=+3233.588145347" lastFinishedPulling="2025-10-08 08:10:49.672882344 +0000 UTC m=+3235.043847279" observedRunningTime="2025-10-08 08:10:50.263181669 +0000 UTC m=+3235.634146634" watchObservedRunningTime="2025-10-08 08:10:50.263773645 +0000 UTC m=+3235.634738580" Oct 08 08:10:57 crc kubenswrapper[4693]: I1008 08:10:57.052693 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:57 crc kubenswrapper[4693]: I1008 08:10:57.053315 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:57 crc kubenswrapper[4693]: I1008 08:10:57.128457 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:57 crc kubenswrapper[4693]: I1008 08:10:57.389357 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:10:57 crc kubenswrapper[4693]: I1008 08:10:57.450105 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-42p6p"] Oct 08 08:10:59 crc kubenswrapper[4693]: I1008 08:10:59.358889 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-42p6p" podUID="6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" containerName="registry-server" containerID="cri-o://21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722" gracePeriod=2 Oct 08 08:10:59 crc kubenswrapper[4693]: I1008 08:10:59.972334 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.069897 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-utilities\") pod \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\" (UID: \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\") " Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.070061 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njnxb\" (UniqueName: \"kubernetes.io/projected/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-kube-api-access-njnxb\") pod \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\" (UID: \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\") " Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.070090 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-catalog-content\") pod \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\" (UID: \"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050\") " Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.071373 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-utilities" (OuterVolumeSpecName: "utilities") pod "6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" (UID: "6c62c537-cc5e-4ae7-8283-9bb5d5e6c050"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.079051 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-kube-api-access-njnxb" (OuterVolumeSpecName: "kube-api-access-njnxb") pod "6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" (UID: "6c62c537-cc5e-4ae7-8283-9bb5d5e6c050"). InnerVolumeSpecName "kube-api-access-njnxb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.085078 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" (UID: "6c62c537-cc5e-4ae7-8283-9bb5d5e6c050"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.172835 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.172889 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njnxb\" (UniqueName: \"kubernetes.io/projected/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-kube-api-access-njnxb\") on node \"crc\" DevicePath \"\"" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.172908 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.373849 4693 generic.go:334] "Generic (PLEG): container finished" podID="6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" containerID="21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722" exitCode=0 Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.373899 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42p6p" event={"ID":"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050","Type":"ContainerDied","Data":"21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722"} Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.373927 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-42p6p" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.373947 4693 scope.go:117] "RemoveContainer" containerID="21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.373932 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42p6p" event={"ID":"6c62c537-cc5e-4ae7-8283-9bb5d5e6c050","Type":"ContainerDied","Data":"1ca7e7ee6f1ead4ac51160a6b3f9a7d01094b505d8f6e844a9941205b46ae6fa"} Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.420953 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-42p6p"] Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.422529 4693 scope.go:117] "RemoveContainer" containerID="9a8948a07e35896d28836d8201fe6cdada2c48e4b9ea04ccd466d6f313469614" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.449558 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-42p6p"] Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.464525 4693 scope.go:117] "RemoveContainer" containerID="34ac80e8173f9b35ad6fdf17195354f2324f7df4df10a114ac38d6383d7209a6" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.514957 4693 scope.go:117] "RemoveContainer" containerID="21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722" Oct 08 08:11:00 crc kubenswrapper[4693]: E1008 08:11:00.515630 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722\": container with ID starting with 21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722 not found: ID does not exist" containerID="21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.515712 4693 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722"} err="failed to get container status \"21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722\": rpc error: code = NotFound desc = could not find container \"21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722\": container with ID starting with 21a2808e225ff48562decb8b3ef6cd2e8122f1b1047047920406869f05da9722 not found: ID does not exist" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.515747 4693 scope.go:117] "RemoveContainer" containerID="9a8948a07e35896d28836d8201fe6cdada2c48e4b9ea04ccd466d6f313469614" Oct 08 08:11:00 crc kubenswrapper[4693]: E1008 08:11:00.516438 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a8948a07e35896d28836d8201fe6cdada2c48e4b9ea04ccd466d6f313469614\": container with ID starting with 9a8948a07e35896d28836d8201fe6cdada2c48e4b9ea04ccd466d6f313469614 not found: ID does not exist" containerID="9a8948a07e35896d28836d8201fe6cdada2c48e4b9ea04ccd466d6f313469614" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.516509 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a8948a07e35896d28836d8201fe6cdada2c48e4b9ea04ccd466d6f313469614"} err="failed to get container status \"9a8948a07e35896d28836d8201fe6cdada2c48e4b9ea04ccd466d6f313469614\": rpc error: code = NotFound desc = could not find container \"9a8948a07e35896d28836d8201fe6cdada2c48e4b9ea04ccd466d6f313469614\": container with ID starting with 9a8948a07e35896d28836d8201fe6cdada2c48e4b9ea04ccd466d6f313469614 not found: ID does not exist" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.516565 4693 scope.go:117] "RemoveContainer" containerID="34ac80e8173f9b35ad6fdf17195354f2324f7df4df10a114ac38d6383d7209a6" Oct 08 08:11:00 crc kubenswrapper[4693]: E1008 08:11:00.518115 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34ac80e8173f9b35ad6fdf17195354f2324f7df4df10a114ac38d6383d7209a6\": container with ID starting with 34ac80e8173f9b35ad6fdf17195354f2324f7df4df10a114ac38d6383d7209a6 not found: ID does not exist" containerID="34ac80e8173f9b35ad6fdf17195354f2324f7df4df10a114ac38d6383d7209a6" Oct 08 08:11:00 crc kubenswrapper[4693]: I1008 08:11:00.520015 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34ac80e8173f9b35ad6fdf17195354f2324f7df4df10a114ac38d6383d7209a6"} err="failed to get container status \"34ac80e8173f9b35ad6fdf17195354f2324f7df4df10a114ac38d6383d7209a6\": rpc error: code = NotFound desc = could not find container \"34ac80e8173f9b35ad6fdf17195354f2324f7df4df10a114ac38d6383d7209a6\": container with ID starting with 34ac80e8173f9b35ad6fdf17195354f2324f7df4df10a114ac38d6383d7209a6 not found: ID does not exist" Oct 08 08:11:01 crc kubenswrapper[4693]: I1008 08:11:01.380323 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" path="/var/lib/kubelet/pods/6c62c537-cc5e-4ae7-8283-9bb5d5e6c050/volumes" Oct 08 08:11:53 crc kubenswrapper[4693]: I1008 08:11:53.489940 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:11:53 crc kubenswrapper[4693]: I1008 08:11:53.490859 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.662736 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-69ngc"] Oct 08 08:12:18 crc kubenswrapper[4693]: E1008 08:12:18.664033 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" containerName="extract-content" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.664061 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" containerName="extract-content" Oct 08 08:12:18 crc kubenswrapper[4693]: E1008 08:12:18.664131 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" containerName="registry-server" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.664145 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" containerName="registry-server" Oct 08 08:12:18 crc kubenswrapper[4693]: E1008 08:12:18.664163 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" containerName="extract-utilities" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.664176 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" containerName="extract-utilities" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.664489 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c62c537-cc5e-4ae7-8283-9bb5d5e6c050" containerName="registry-server" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.667583 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.676418 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-69ngc"] Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.751468 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3ca770e-1853-48e2-8a2b-8192c16ffa78-utilities\") pod \"redhat-operators-69ngc\" (UID: \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\") " pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.751554 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l27v5\" (UniqueName: \"kubernetes.io/projected/e3ca770e-1853-48e2-8a2b-8192c16ffa78-kube-api-access-l27v5\") pod \"redhat-operators-69ngc\" (UID: \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\") " pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.751598 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3ca770e-1853-48e2-8a2b-8192c16ffa78-catalog-content\") pod \"redhat-operators-69ngc\" (UID: \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\") " pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.854480 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3ca770e-1853-48e2-8a2b-8192c16ffa78-catalog-content\") pod \"redhat-operators-69ngc\" (UID: \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\") " pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.854732 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3ca770e-1853-48e2-8a2b-8192c16ffa78-utilities\") pod \"redhat-operators-69ngc\" (UID: \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\") " pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.854880 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l27v5\" (UniqueName: \"kubernetes.io/projected/e3ca770e-1853-48e2-8a2b-8192c16ffa78-kube-api-access-l27v5\") pod \"redhat-operators-69ngc\" (UID: \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\") " pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.855145 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3ca770e-1853-48e2-8a2b-8192c16ffa78-catalog-content\") pod \"redhat-operators-69ngc\" (UID: \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\") " pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.855693 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3ca770e-1853-48e2-8a2b-8192c16ffa78-utilities\") pod \"redhat-operators-69ngc\" (UID: \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\") " pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:18 crc kubenswrapper[4693]: I1008 08:12:18.880730 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-l27v5\" (UniqueName: \"kubernetes.io/projected/e3ca770e-1853-48e2-8a2b-8192c16ffa78-kube-api-access-l27v5\") pod \"redhat-operators-69ngc\" (UID: \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\") " pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:19 crc kubenswrapper[4693]: I1008 08:12:19.036976 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:19 crc kubenswrapper[4693]: I1008 08:12:19.557793 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-69ngc"] Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.252862 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hvjgc"] Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.254973 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.266607 4693 generic.go:334] "Generic (PLEG): container finished" podID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" containerID="15d07f593fd062b903ab5669c2932f102643319bb17f2d59da2f6f5298c32691" exitCode=0 Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.266640 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69ngc" event={"ID":"e3ca770e-1853-48e2-8a2b-8192c16ffa78","Type":"ContainerDied","Data":"15d07f593fd062b903ab5669c2932f102643319bb17f2d59da2f6f5298c32691"} Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.266664 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69ngc" event={"ID":"e3ca770e-1853-48e2-8a2b-8192c16ffa78","Type":"ContainerStarted","Data":"e5fda5af6ec20774a9ad2621b3ec438010025c91f443d1759b8c095732b6d298"} Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.269570 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hvjgc"] Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.396166 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2bm6\" (UniqueName: \"kubernetes.io/projected/3835018e-03f3-4104-b0d4-caad159f7a43-kube-api-access-q2bm6\") pod \"certified-operators-hvjgc\" (UID: \"3835018e-03f3-4104-b0d4-caad159f7a43\") " pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.396271 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3835018e-03f3-4104-b0d4-caad159f7a43-catalog-content\") pod \"certified-operators-hvjgc\" (UID: \"3835018e-03f3-4104-b0d4-caad159f7a43\") " pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.396374 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3835018e-03f3-4104-b0d4-caad159f7a43-utilities\") pod \"certified-operators-hvjgc\" (UID: \"3835018e-03f3-4104-b0d4-caad159f7a43\") " pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.498580 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2bm6\" (UniqueName: 
\"kubernetes.io/projected/3835018e-03f3-4104-b0d4-caad159f7a43-kube-api-access-q2bm6\") pod \"certified-operators-hvjgc\" (UID: \"3835018e-03f3-4104-b0d4-caad159f7a43\") " pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.498669 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3835018e-03f3-4104-b0d4-caad159f7a43-catalog-content\") pod \"certified-operators-hvjgc\" (UID: \"3835018e-03f3-4104-b0d4-caad159f7a43\") " pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.498740 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3835018e-03f3-4104-b0d4-caad159f7a43-utilities\") pod \"certified-operators-hvjgc\" (UID: \"3835018e-03f3-4104-b0d4-caad159f7a43\") " pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.499248 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3835018e-03f3-4104-b0d4-caad159f7a43-utilities\") pod \"certified-operators-hvjgc\" (UID: \"3835018e-03f3-4104-b0d4-caad159f7a43\") " pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.499388 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3835018e-03f3-4104-b0d4-caad159f7a43-catalog-content\") pod \"certified-operators-hvjgc\" (UID: \"3835018e-03f3-4104-b0d4-caad159f7a43\") " pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.527862 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2bm6\" (UniqueName: \"kubernetes.io/projected/3835018e-03f3-4104-b0d4-caad159f7a43-kube-api-access-q2bm6\") pod \"certified-operators-hvjgc\" (UID: \"3835018e-03f3-4104-b0d4-caad159f7a43\") " pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:20 crc kubenswrapper[4693]: I1008 08:12:20.588229 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:21 crc kubenswrapper[4693]: I1008 08:12:21.061972 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hvjgc"] Oct 08 08:12:21 crc kubenswrapper[4693]: I1008 08:12:21.283310 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvjgc" event={"ID":"3835018e-03f3-4104-b0d4-caad159f7a43","Type":"ContainerStarted","Data":"019b3a50be373354e358ba61cda037efb3dcf1af36c06ca3735f1a4c271c0ca2"} Oct 08 08:12:22 crc kubenswrapper[4693]: I1008 08:12:22.295508 4693 generic.go:334] "Generic (PLEG): container finished" podID="3835018e-03f3-4104-b0d4-caad159f7a43" containerID="eb98d2bd9d5ab16c510ea462bb5ad06893a735d54f90e51b25d626ad38004c6a" exitCode=0 Oct 08 08:12:22 crc kubenswrapper[4693]: I1008 08:12:22.295601 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvjgc" event={"ID":"3835018e-03f3-4104-b0d4-caad159f7a43","Type":"ContainerDied","Data":"eb98d2bd9d5ab16c510ea462bb5ad06893a735d54f90e51b25d626ad38004c6a"} Oct 08 08:12:22 crc kubenswrapper[4693]: I1008 08:12:22.299591 4693 generic.go:334] "Generic (PLEG): container finished" podID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" containerID="0f643f27a086ff4cebfe90106c8db85d19ade3b7710cc9d24c0ba2f195ad83dd" exitCode=0 Oct 08 08:12:22 crc kubenswrapper[4693]: I1008 08:12:22.299617 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69ngc" event={"ID":"e3ca770e-1853-48e2-8a2b-8192c16ffa78","Type":"ContainerDied","Data":"0f643f27a086ff4cebfe90106c8db85d19ade3b7710cc9d24c0ba2f195ad83dd"} Oct 08 08:12:23 crc kubenswrapper[4693]: I1008 08:12:23.314646 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvjgc" event={"ID":"3835018e-03f3-4104-b0d4-caad159f7a43","Type":"ContainerStarted","Data":"4cbdd8b13548c1e1cfd87f02d4f015b0f45de67ab3f35465aace29f83259da05"} Oct 08 08:12:23 crc kubenswrapper[4693]: I1008 08:12:23.322092 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69ngc" event={"ID":"e3ca770e-1853-48e2-8a2b-8192c16ffa78","Type":"ContainerStarted","Data":"f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03"} Oct 08 08:12:23 crc kubenswrapper[4693]: I1008 08:12:23.371295 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-69ngc" podStartSLOduration=2.819109297 podStartE2EDuration="5.37126164s" podCreationTimestamp="2025-10-08 08:12:18 +0000 UTC" firstStartedPulling="2025-10-08 08:12:20.268339515 +0000 UTC m=+3325.639304450" lastFinishedPulling="2025-10-08 08:12:22.820491818 +0000 UTC m=+3328.191456793" observedRunningTime="2025-10-08 08:12:23.362496069 +0000 UTC m=+3328.733461034" watchObservedRunningTime="2025-10-08 08:12:23.37126164 +0000 UTC m=+3328.742226575" Oct 08 08:12:23 crc kubenswrapper[4693]: I1008 08:12:23.489984 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:12:23 crc kubenswrapper[4693]: I1008 08:12:23.490066 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" 
podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:12:24 crc kubenswrapper[4693]: I1008 08:12:24.340730 4693 generic.go:334] "Generic (PLEG): container finished" podID="3835018e-03f3-4104-b0d4-caad159f7a43" containerID="4cbdd8b13548c1e1cfd87f02d4f015b0f45de67ab3f35465aace29f83259da05" exitCode=0 Oct 08 08:12:24 crc kubenswrapper[4693]: I1008 08:12:24.340880 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvjgc" event={"ID":"3835018e-03f3-4104-b0d4-caad159f7a43","Type":"ContainerDied","Data":"4cbdd8b13548c1e1cfd87f02d4f015b0f45de67ab3f35465aace29f83259da05"} Oct 08 08:12:25 crc kubenswrapper[4693]: I1008 08:12:25.352577 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvjgc" event={"ID":"3835018e-03f3-4104-b0d4-caad159f7a43","Type":"ContainerStarted","Data":"237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42"} Oct 08 08:12:25 crc kubenswrapper[4693]: I1008 08:12:25.393550 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hvjgc" podStartSLOduration=2.834372244 podStartE2EDuration="5.393525151s" podCreationTimestamp="2025-10-08 08:12:20 +0000 UTC" firstStartedPulling="2025-10-08 08:12:22.302105809 +0000 UTC m=+3327.673117435" lastFinishedPulling="2025-10-08 08:12:24.861305397 +0000 UTC m=+3330.232270342" observedRunningTime="2025-10-08 08:12:25.380081986 +0000 UTC m=+3330.751046941" watchObservedRunningTime="2025-10-08 08:12:25.393525151 +0000 UTC m=+3330.764490126" Oct 08 08:12:29 crc kubenswrapper[4693]: I1008 08:12:29.037674 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:29 crc kubenswrapper[4693]: I1008 08:12:29.037737 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:30 crc kubenswrapper[4693]: I1008 08:12:30.169163 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-69ngc" podUID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" containerName="registry-server" probeResult="failure" output=< Oct 08 08:12:30 crc kubenswrapper[4693]: timeout: failed to connect service ":50051" within 1s Oct 08 08:12:30 crc kubenswrapper[4693]: > Oct 08 08:12:30 crc kubenswrapper[4693]: I1008 08:12:30.588552 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:30 crc kubenswrapper[4693]: I1008 08:12:30.588619 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:31 crc kubenswrapper[4693]: I1008 08:12:31.658843 4693 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-hvjgc" podUID="3835018e-03f3-4104-b0d4-caad159f7a43" containerName="registry-server" probeResult="failure" output=< Oct 08 08:12:31 crc kubenswrapper[4693]: timeout: failed to connect service ":50051" within 1s Oct 08 08:12:31 crc kubenswrapper[4693]: > Oct 08 08:12:39 crc kubenswrapper[4693]: I1008 08:12:39.090317 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:39 crc 
kubenswrapper[4693]: I1008 08:12:39.156487 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:39 crc kubenswrapper[4693]: I1008 08:12:39.337114 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-69ngc"] Oct 08 08:12:40 crc kubenswrapper[4693]: I1008 08:12:40.535078 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-69ngc" podUID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" containerName="registry-server" containerID="cri-o://f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03" gracePeriod=2 Oct 08 08:12:40 crc kubenswrapper[4693]: I1008 08:12:40.675798 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:40 crc kubenswrapper[4693]: I1008 08:12:40.745775 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.132747 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.178675 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3ca770e-1853-48e2-8a2b-8192c16ffa78-utilities\") pod \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\" (UID: \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\") " Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.178842 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l27v5\" (UniqueName: \"kubernetes.io/projected/e3ca770e-1853-48e2-8a2b-8192c16ffa78-kube-api-access-l27v5\") pod \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\" (UID: \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\") " Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.179002 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3ca770e-1853-48e2-8a2b-8192c16ffa78-catalog-content\") pod \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\" (UID: \"e3ca770e-1853-48e2-8a2b-8192c16ffa78\") " Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.179723 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3ca770e-1853-48e2-8a2b-8192c16ffa78-utilities" (OuterVolumeSpecName: "utilities") pod "e3ca770e-1853-48e2-8a2b-8192c16ffa78" (UID: "e3ca770e-1853-48e2-8a2b-8192c16ffa78"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.198079 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3ca770e-1853-48e2-8a2b-8192c16ffa78-kube-api-access-l27v5" (OuterVolumeSpecName: "kube-api-access-l27v5") pod "e3ca770e-1853-48e2-8a2b-8192c16ffa78" (UID: "e3ca770e-1853-48e2-8a2b-8192c16ffa78"). InnerVolumeSpecName "kube-api-access-l27v5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.269123 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3ca770e-1853-48e2-8a2b-8192c16ffa78-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e3ca770e-1853-48e2-8a2b-8192c16ffa78" (UID: "e3ca770e-1853-48e2-8a2b-8192c16ffa78"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.281312 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3ca770e-1853-48e2-8a2b-8192c16ffa78-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.281365 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3ca770e-1853-48e2-8a2b-8192c16ffa78-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.281386 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l27v5\" (UniqueName: \"kubernetes.io/projected/e3ca770e-1853-48e2-8a2b-8192c16ffa78-kube-api-access-l27v5\") on node \"crc\" DevicePath \"\"" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.548498 4693 generic.go:334] "Generic (PLEG): container finished" podID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" containerID="f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03" exitCode=0 Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.548772 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69ngc" event={"ID":"e3ca770e-1853-48e2-8a2b-8192c16ffa78","Type":"ContainerDied","Data":"f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03"} Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.548881 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-69ngc" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.549339 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69ngc" event={"ID":"e3ca770e-1853-48e2-8a2b-8192c16ffa78","Type":"ContainerDied","Data":"e5fda5af6ec20774a9ad2621b3ec438010025c91f443d1759b8c095732b6d298"} Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.549403 4693 scope.go:117] "RemoveContainer" containerID="f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.576269 4693 scope.go:117] "RemoveContainer" containerID="0f643f27a086ff4cebfe90106c8db85d19ade3b7710cc9d24c0ba2f195ad83dd" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.603257 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-69ngc"] Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.614486 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-69ngc"] Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.621996 4693 scope.go:117] "RemoveContainer" containerID="15d07f593fd062b903ab5669c2932f102643319bb17f2d59da2f6f5298c32691" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.677658 4693 scope.go:117] "RemoveContainer" containerID="f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03" Oct 08 08:12:41 crc kubenswrapper[4693]: E1008 08:12:41.680501 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03\": container with ID starting with f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03 not found: ID does not exist" containerID="f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.680564 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03"} err="failed to get container status \"f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03\": rpc error: code = NotFound desc = could not find container \"f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03\": container with ID starting with f4498b03470a5b3c5ad82bf573bb808f1f7102b630e528cfbf0f52dad881ce03 not found: ID does not exist" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.680599 4693 scope.go:117] "RemoveContainer" containerID="0f643f27a086ff4cebfe90106c8db85d19ade3b7710cc9d24c0ba2f195ad83dd" Oct 08 08:12:41 crc kubenswrapper[4693]: E1008 08:12:41.681100 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f643f27a086ff4cebfe90106c8db85d19ade3b7710cc9d24c0ba2f195ad83dd\": container with ID starting with 0f643f27a086ff4cebfe90106c8db85d19ade3b7710cc9d24c0ba2f195ad83dd not found: ID does not exist" containerID="0f643f27a086ff4cebfe90106c8db85d19ade3b7710cc9d24c0ba2f195ad83dd" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.681145 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f643f27a086ff4cebfe90106c8db85d19ade3b7710cc9d24c0ba2f195ad83dd"} err="failed to get container status \"0f643f27a086ff4cebfe90106c8db85d19ade3b7710cc9d24c0ba2f195ad83dd\": rpc error: code = NotFound desc = could not find container 
\"0f643f27a086ff4cebfe90106c8db85d19ade3b7710cc9d24c0ba2f195ad83dd\": container with ID starting with 0f643f27a086ff4cebfe90106c8db85d19ade3b7710cc9d24c0ba2f195ad83dd not found: ID does not exist" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.681180 4693 scope.go:117] "RemoveContainer" containerID="15d07f593fd062b903ab5669c2932f102643319bb17f2d59da2f6f5298c32691" Oct 08 08:12:41 crc kubenswrapper[4693]: E1008 08:12:41.681586 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15d07f593fd062b903ab5669c2932f102643319bb17f2d59da2f6f5298c32691\": container with ID starting with 15d07f593fd062b903ab5669c2932f102643319bb17f2d59da2f6f5298c32691 not found: ID does not exist" containerID="15d07f593fd062b903ab5669c2932f102643319bb17f2d59da2f6f5298c32691" Oct 08 08:12:41 crc kubenswrapper[4693]: I1008 08:12:41.681653 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15d07f593fd062b903ab5669c2932f102643319bb17f2d59da2f6f5298c32691"} err="failed to get container status \"15d07f593fd062b903ab5669c2932f102643319bb17f2d59da2f6f5298c32691\": rpc error: code = NotFound desc = could not find container \"15d07f593fd062b903ab5669c2932f102643319bb17f2d59da2f6f5298c32691\": container with ID starting with 15d07f593fd062b903ab5669c2932f102643319bb17f2d59da2f6f5298c32691 not found: ID does not exist" Oct 08 08:12:42 crc kubenswrapper[4693]: I1008 08:12:42.141358 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hvjgc"] Oct 08 08:12:42 crc kubenswrapper[4693]: I1008 08:12:42.561659 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hvjgc" podUID="3835018e-03f3-4104-b0d4-caad159f7a43" containerName="registry-server" containerID="cri-o://237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42" gracePeriod=2 Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.151769 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.324281 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2bm6\" (UniqueName: \"kubernetes.io/projected/3835018e-03f3-4104-b0d4-caad159f7a43-kube-api-access-q2bm6\") pod \"3835018e-03f3-4104-b0d4-caad159f7a43\" (UID: \"3835018e-03f3-4104-b0d4-caad159f7a43\") " Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.324480 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3835018e-03f3-4104-b0d4-caad159f7a43-utilities\") pod \"3835018e-03f3-4104-b0d4-caad159f7a43\" (UID: \"3835018e-03f3-4104-b0d4-caad159f7a43\") " Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.324665 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3835018e-03f3-4104-b0d4-caad159f7a43-catalog-content\") pod \"3835018e-03f3-4104-b0d4-caad159f7a43\" (UID: \"3835018e-03f3-4104-b0d4-caad159f7a43\") " Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.326074 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3835018e-03f3-4104-b0d4-caad159f7a43-utilities" (OuterVolumeSpecName: "utilities") pod "3835018e-03f3-4104-b0d4-caad159f7a43" (UID: "3835018e-03f3-4104-b0d4-caad159f7a43"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.344229 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3835018e-03f3-4104-b0d4-caad159f7a43-kube-api-access-q2bm6" (OuterVolumeSpecName: "kube-api-access-q2bm6") pod "3835018e-03f3-4104-b0d4-caad159f7a43" (UID: "3835018e-03f3-4104-b0d4-caad159f7a43"). InnerVolumeSpecName "kube-api-access-q2bm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.375229 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3835018e-03f3-4104-b0d4-caad159f7a43-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3835018e-03f3-4104-b0d4-caad159f7a43" (UID: "3835018e-03f3-4104-b0d4-caad159f7a43"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.384196 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" path="/var/lib/kubelet/pods/e3ca770e-1853-48e2-8a2b-8192c16ffa78/volumes" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.427268 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3835018e-03f3-4104-b0d4-caad159f7a43-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.427308 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3835018e-03f3-4104-b0d4-caad159f7a43-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.427322 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2bm6\" (UniqueName: \"kubernetes.io/projected/3835018e-03f3-4104-b0d4-caad159f7a43-kube-api-access-q2bm6\") on node \"crc\" DevicePath \"\"" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.577213 4693 generic.go:334] "Generic (PLEG): container finished" podID="3835018e-03f3-4104-b0d4-caad159f7a43" containerID="237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42" exitCode=0 Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.577278 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvjgc" event={"ID":"3835018e-03f3-4104-b0d4-caad159f7a43","Type":"ContainerDied","Data":"237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42"} Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.577321 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvjgc" event={"ID":"3835018e-03f3-4104-b0d4-caad159f7a43","Type":"ContainerDied","Data":"019b3a50be373354e358ba61cda037efb3dcf1af36c06ca3735f1a4c271c0ca2"} Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.577352 4693 scope.go:117] "RemoveContainer" containerID="237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.577553 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hvjgc" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.617709 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hvjgc"] Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.621277 4693 scope.go:117] "RemoveContainer" containerID="4cbdd8b13548c1e1cfd87f02d4f015b0f45de67ab3f35465aace29f83259da05" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.625080 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hvjgc"] Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.669132 4693 scope.go:117] "RemoveContainer" containerID="eb98d2bd9d5ab16c510ea462bb5ad06893a735d54f90e51b25d626ad38004c6a" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.703721 4693 scope.go:117] "RemoveContainer" containerID="237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42" Oct 08 08:12:43 crc kubenswrapper[4693]: E1008 08:12:43.704403 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42\": container with ID starting with 237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42 not found: ID does not exist" containerID="237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.704454 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42"} err="failed to get container status \"237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42\": rpc error: code = NotFound desc = could not find container \"237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42\": container with ID starting with 237cd2b09a961205d9f416d9d8558063a495d0e695a43d44eee53f04f8d0ca42 not found: ID does not exist" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.704490 4693 scope.go:117] "RemoveContainer" containerID="4cbdd8b13548c1e1cfd87f02d4f015b0f45de67ab3f35465aace29f83259da05" Oct 08 08:12:43 crc kubenswrapper[4693]: E1008 08:12:43.704997 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cbdd8b13548c1e1cfd87f02d4f015b0f45de67ab3f35465aace29f83259da05\": container with ID starting with 4cbdd8b13548c1e1cfd87f02d4f015b0f45de67ab3f35465aace29f83259da05 not found: ID does not exist" containerID="4cbdd8b13548c1e1cfd87f02d4f015b0f45de67ab3f35465aace29f83259da05" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.705023 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cbdd8b13548c1e1cfd87f02d4f015b0f45de67ab3f35465aace29f83259da05"} err="failed to get container status \"4cbdd8b13548c1e1cfd87f02d4f015b0f45de67ab3f35465aace29f83259da05\": rpc error: code = NotFound desc = could not find container \"4cbdd8b13548c1e1cfd87f02d4f015b0f45de67ab3f35465aace29f83259da05\": container with ID starting with 4cbdd8b13548c1e1cfd87f02d4f015b0f45de67ab3f35465aace29f83259da05 not found: ID does not exist" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.705042 4693 scope.go:117] "RemoveContainer" containerID="eb98d2bd9d5ab16c510ea462bb5ad06893a735d54f90e51b25d626ad38004c6a" Oct 08 08:12:43 crc kubenswrapper[4693]: E1008 08:12:43.705399 4693 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"eb98d2bd9d5ab16c510ea462bb5ad06893a735d54f90e51b25d626ad38004c6a\": container with ID starting with eb98d2bd9d5ab16c510ea462bb5ad06893a735d54f90e51b25d626ad38004c6a not found: ID does not exist" containerID="eb98d2bd9d5ab16c510ea462bb5ad06893a735d54f90e51b25d626ad38004c6a" Oct 08 08:12:43 crc kubenswrapper[4693]: I1008 08:12:43.705431 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb98d2bd9d5ab16c510ea462bb5ad06893a735d54f90e51b25d626ad38004c6a"} err="failed to get container status \"eb98d2bd9d5ab16c510ea462bb5ad06893a735d54f90e51b25d626ad38004c6a\": rpc error: code = NotFound desc = could not find container \"eb98d2bd9d5ab16c510ea462bb5ad06893a735d54f90e51b25d626ad38004c6a\": container with ID starting with eb98d2bd9d5ab16c510ea462bb5ad06893a735d54f90e51b25d626ad38004c6a not found: ID does not exist" Oct 08 08:12:45 crc kubenswrapper[4693]: I1008 08:12:45.413235 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3835018e-03f3-4104-b0d4-caad159f7a43" path="/var/lib/kubelet/pods/3835018e-03f3-4104-b0d4-caad159f7a43/volumes" Oct 08 08:12:53 crc kubenswrapper[4693]: I1008 08:12:53.490121 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:12:53 crc kubenswrapper[4693]: I1008 08:12:53.490960 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:12:53 crc kubenswrapper[4693]: I1008 08:12:53.491033 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 08:12:53 crc kubenswrapper[4693]: I1008 08:12:53.491927 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6c7bb5f0ad5db721978259349bf27e3d7356eacafd118f6db51f957705571e0c"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 08 08:12:53 crc kubenswrapper[4693]: I1008 08:12:53.492054 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://6c7bb5f0ad5db721978259349bf27e3d7356eacafd118f6db51f957705571e0c" gracePeriod=600 Oct 08 08:12:53 crc kubenswrapper[4693]: I1008 08:12:53.693360 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="6c7bb5f0ad5db721978259349bf27e3d7356eacafd118f6db51f957705571e0c" exitCode=0 Oct 08 08:12:53 crc kubenswrapper[4693]: I1008 08:12:53.693429 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" 
event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"6c7bb5f0ad5db721978259349bf27e3d7356eacafd118f6db51f957705571e0c"} Oct 08 08:12:53 crc kubenswrapper[4693]: I1008 08:12:53.693733 4693 scope.go:117] "RemoveContainer" containerID="288d548af23b29a7cdaf7d2835332d41a61423754a2a616ff1051b00333f1988" Oct 08 08:12:54 crc kubenswrapper[4693]: I1008 08:12:54.707702 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b"} Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.527177 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qrtbn"] Oct 08 08:14:32 crc kubenswrapper[4693]: E1008 08:14:32.528168 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3835018e-03f3-4104-b0d4-caad159f7a43" containerName="registry-server" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.528182 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="3835018e-03f3-4104-b0d4-caad159f7a43" containerName="registry-server" Oct 08 08:14:32 crc kubenswrapper[4693]: E1008 08:14:32.528209 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3835018e-03f3-4104-b0d4-caad159f7a43" containerName="extract-content" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.528216 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="3835018e-03f3-4104-b0d4-caad159f7a43" containerName="extract-content" Oct 08 08:14:32 crc kubenswrapper[4693]: E1008 08:14:32.528235 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" containerName="registry-server" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.528242 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" containerName="registry-server" Oct 08 08:14:32 crc kubenswrapper[4693]: E1008 08:14:32.528253 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" containerName="extract-utilities" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.528260 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" containerName="extract-utilities" Oct 08 08:14:32 crc kubenswrapper[4693]: E1008 08:14:32.528276 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" containerName="extract-content" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.528281 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" containerName="extract-content" Oct 08 08:14:32 crc kubenswrapper[4693]: E1008 08:14:32.528289 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3835018e-03f3-4104-b0d4-caad159f7a43" containerName="extract-utilities" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.528294 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="3835018e-03f3-4104-b0d4-caad159f7a43" containerName="extract-utilities" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.528477 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="3835018e-03f3-4104-b0d4-caad159f7a43" containerName="registry-server" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.528489 4693 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e3ca770e-1853-48e2-8a2b-8192c16ffa78" containerName="registry-server" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.529793 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qrtbn" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.571208 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qrtbn"] Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.600643 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9ba2b13-c93b-4a77-baba-725c6365150d-catalog-content\") pod \"community-operators-qrtbn\" (UID: \"f9ba2b13-c93b-4a77-baba-725c6365150d\") " pod="openshift-marketplace/community-operators-qrtbn" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.601113 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9ba2b13-c93b-4a77-baba-725c6365150d-utilities\") pod \"community-operators-qrtbn\" (UID: \"f9ba2b13-c93b-4a77-baba-725c6365150d\") " pod="openshift-marketplace/community-operators-qrtbn" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.601225 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ns8w\" (UniqueName: \"kubernetes.io/projected/f9ba2b13-c93b-4a77-baba-725c6365150d-kube-api-access-8ns8w\") pod \"community-operators-qrtbn\" (UID: \"f9ba2b13-c93b-4a77-baba-725c6365150d\") " pod="openshift-marketplace/community-operators-qrtbn" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.703217 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9ba2b13-c93b-4a77-baba-725c6365150d-catalog-content\") pod \"community-operators-qrtbn\" (UID: \"f9ba2b13-c93b-4a77-baba-725c6365150d\") " pod="openshift-marketplace/community-operators-qrtbn" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.703396 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9ba2b13-c93b-4a77-baba-725c6365150d-utilities\") pod \"community-operators-qrtbn\" (UID: \"f9ba2b13-c93b-4a77-baba-725c6365150d\") " pod="openshift-marketplace/community-operators-qrtbn" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.703716 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9ba2b13-c93b-4a77-baba-725c6365150d-catalog-content\") pod \"community-operators-qrtbn\" (UID: \"f9ba2b13-c93b-4a77-baba-725c6365150d\") " pod="openshift-marketplace/community-operators-qrtbn" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.703994 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9ba2b13-c93b-4a77-baba-725c6365150d-utilities\") pod \"community-operators-qrtbn\" (UID: \"f9ba2b13-c93b-4a77-baba-725c6365150d\") " pod="openshift-marketplace/community-operators-qrtbn" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.704269 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ns8w\" (UniqueName: \"kubernetes.io/projected/f9ba2b13-c93b-4a77-baba-725c6365150d-kube-api-access-8ns8w\") pod \"community-operators-qrtbn\" (UID: 
\"f9ba2b13-c93b-4a77-baba-725c6365150d\") " pod="openshift-marketplace/community-operators-qrtbn" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.730764 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ns8w\" (UniqueName: \"kubernetes.io/projected/f9ba2b13-c93b-4a77-baba-725c6365150d-kube-api-access-8ns8w\") pod \"community-operators-qrtbn\" (UID: \"f9ba2b13-c93b-4a77-baba-725c6365150d\") " pod="openshift-marketplace/community-operators-qrtbn" Oct 08 08:14:32 crc kubenswrapper[4693]: I1008 08:14:32.865721 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qrtbn" Oct 08 08:14:33 crc kubenswrapper[4693]: I1008 08:14:33.416372 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qrtbn"] Oct 08 08:14:33 crc kubenswrapper[4693]: I1008 08:14:33.883052 4693 generic.go:334] "Generic (PLEG): container finished" podID="f9ba2b13-c93b-4a77-baba-725c6365150d" containerID="ccf038163934c6f5cd367d0acd7ef30f38531d6808205ff700e8a5e20f8f6f77" exitCode=0 Oct 08 08:14:33 crc kubenswrapper[4693]: I1008 08:14:33.883112 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrtbn" event={"ID":"f9ba2b13-c93b-4a77-baba-725c6365150d","Type":"ContainerDied","Data":"ccf038163934c6f5cd367d0acd7ef30f38531d6808205ff700e8a5e20f8f6f77"} Oct 08 08:14:33 crc kubenswrapper[4693]: I1008 08:14:33.883307 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrtbn" event={"ID":"f9ba2b13-c93b-4a77-baba-725c6365150d","Type":"ContainerStarted","Data":"a5ffa54eaaef76f92f325f63fe85b11bdd4c56b96405599281e23fec72720935"} Oct 08 08:14:35 crc kubenswrapper[4693]: I1008 08:14:35.910032 4693 generic.go:334] "Generic (PLEG): container finished" podID="f9ba2b13-c93b-4a77-baba-725c6365150d" containerID="782748b23887009c0a0411f945266d3c39a912c5112bf66f9a384c523bd463ec" exitCode=0 Oct 08 08:14:35 crc kubenswrapper[4693]: I1008 08:14:35.910157 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrtbn" event={"ID":"f9ba2b13-c93b-4a77-baba-725c6365150d","Type":"ContainerDied","Data":"782748b23887009c0a0411f945266d3c39a912c5112bf66f9a384c523bd463ec"} Oct 08 08:14:36 crc kubenswrapper[4693]: I1008 08:14:36.922510 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrtbn" event={"ID":"f9ba2b13-c93b-4a77-baba-725c6365150d","Type":"ContainerStarted","Data":"e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a"} Oct 08 08:14:36 crc kubenswrapper[4693]: I1008 08:14:36.975423 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qrtbn" podStartSLOduration=2.377186955 podStartE2EDuration="4.975388122s" podCreationTimestamp="2025-10-08 08:14:32 +0000 UTC" firstStartedPulling="2025-10-08 08:14:33.885276755 +0000 UTC m=+3459.256241700" lastFinishedPulling="2025-10-08 08:14:36.483477892 +0000 UTC m=+3461.854442867" observedRunningTime="2025-10-08 08:14:36.951544163 +0000 UTC m=+3462.322509138" watchObservedRunningTime="2025-10-08 08:14:36.975388122 +0000 UTC m=+3462.346353097" Oct 08 08:14:42 crc kubenswrapper[4693]: I1008 08:14:42.866953 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qrtbn" Oct 08 08:14:42 crc kubenswrapper[4693]: I1008 08:14:42.867634 
Oct 08 08:14:42 crc kubenswrapper[4693]: I1008 08:14:42.956034 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qrtbn"
Oct 08 08:14:43 crc kubenswrapper[4693]: I1008 08:14:43.084625 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qrtbn"
Oct 08 08:14:43 crc kubenswrapper[4693]: I1008 08:14:43.203056 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qrtbn"]
Oct 08 08:14:45 crc kubenswrapper[4693]: I1008 08:14:45.040655 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qrtbn" podUID="f9ba2b13-c93b-4a77-baba-725c6365150d" containerName="registry-server" containerID="cri-o://e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a" gracePeriod=2
Oct 08 08:14:45 crc kubenswrapper[4693]: I1008 08:14:45.594129 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qrtbn"
Oct 08 08:14:45 crc kubenswrapper[4693]: I1008 08:14:45.690619 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9ba2b13-c93b-4a77-baba-725c6365150d-catalog-content\") pod \"f9ba2b13-c93b-4a77-baba-725c6365150d\" (UID: \"f9ba2b13-c93b-4a77-baba-725c6365150d\") "
Oct 08 08:14:45 crc kubenswrapper[4693]: I1008 08:14:45.690695 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9ba2b13-c93b-4a77-baba-725c6365150d-utilities\") pod \"f9ba2b13-c93b-4a77-baba-725c6365150d\" (UID: \"f9ba2b13-c93b-4a77-baba-725c6365150d\") "
Oct 08 08:14:45 crc kubenswrapper[4693]: I1008 08:14:45.690841 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ns8w\" (UniqueName: \"kubernetes.io/projected/f9ba2b13-c93b-4a77-baba-725c6365150d-kube-api-access-8ns8w\") pod \"f9ba2b13-c93b-4a77-baba-725c6365150d\" (UID: \"f9ba2b13-c93b-4a77-baba-725c6365150d\") "
Oct 08 08:14:45 crc kubenswrapper[4693]: I1008 08:14:45.691808 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9ba2b13-c93b-4a77-baba-725c6365150d-utilities" (OuterVolumeSpecName: "utilities") pod "f9ba2b13-c93b-4a77-baba-725c6365150d" (UID: "f9ba2b13-c93b-4a77-baba-725c6365150d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 08:14:45 crc kubenswrapper[4693]: I1008 08:14:45.696392 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9ba2b13-c93b-4a77-baba-725c6365150d-kube-api-access-8ns8w" (OuterVolumeSpecName: "kube-api-access-8ns8w") pod "f9ba2b13-c93b-4a77-baba-725c6365150d" (UID: "f9ba2b13-c93b-4a77-baba-725c6365150d"). InnerVolumeSpecName "kube-api-access-8ns8w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 08:14:45 crc kubenswrapper[4693]: I1008 08:14:45.792998 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9ba2b13-c93b-4a77-baba-725c6365150d-utilities\") on node \"crc\" DevicePath \"\""
Oct 08 08:14:45 crc kubenswrapper[4693]: I1008 08:14:45.793025 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ns8w\" (UniqueName: \"kubernetes.io/projected/f9ba2b13-c93b-4a77-baba-725c6365150d-kube-api-access-8ns8w\") on node \"crc\" DevicePath \"\""
Oct 08 08:14:45 crc kubenswrapper[4693]: I1008 08:14:45.943726 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9ba2b13-c93b-4a77-baba-725c6365150d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9ba2b13-c93b-4a77-baba-725c6365150d" (UID: "f9ba2b13-c93b-4a77-baba-725c6365150d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 08:14:45 crc kubenswrapper[4693]: I1008 08:14:45.997028 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9ba2b13-c93b-4a77-baba-725c6365150d-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.049935 4693 generic.go:334] "Generic (PLEG): container finished" podID="f9ba2b13-c93b-4a77-baba-725c6365150d" containerID="e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a" exitCode=0
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.049976 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrtbn" event={"ID":"f9ba2b13-c93b-4a77-baba-725c6365150d","Type":"ContainerDied","Data":"e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a"}
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.050007 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrtbn" event={"ID":"f9ba2b13-c93b-4a77-baba-725c6365150d","Type":"ContainerDied","Data":"a5ffa54eaaef76f92f325f63fe85b11bdd4c56b96405599281e23fec72720935"}
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.050026 4693 scope.go:117] "RemoveContainer" containerID="e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a"
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.050027 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qrtbn"
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.087618 4693 scope.go:117] "RemoveContainer" containerID="782748b23887009c0a0411f945266d3c39a912c5112bf66f9a384c523bd463ec"
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.088569 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qrtbn"]
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.104153 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qrtbn"]
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.111899 4693 scope.go:117] "RemoveContainer" containerID="ccf038163934c6f5cd367d0acd7ef30f38531d6808205ff700e8a5e20f8f6f77"
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.154984 4693 scope.go:117] "RemoveContainer" containerID="e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a"
Oct 08 08:14:46 crc kubenswrapper[4693]: E1008 08:14:46.155728 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a\": container with ID starting with e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a not found: ID does not exist" containerID="e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a"
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.155767 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a"} err="failed to get container status \"e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a\": rpc error: code = NotFound desc = could not find container \"e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a\": container with ID starting with e66993156e9ea318db479124e753bf16f5413d31e725658b66a05d641769c64a not found: ID does not exist"
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.155791 4693 scope.go:117] "RemoveContainer" containerID="782748b23887009c0a0411f945266d3c39a912c5112bf66f9a384c523bd463ec"
Oct 08 08:14:46 crc kubenswrapper[4693]: E1008 08:14:46.156572 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"782748b23887009c0a0411f945266d3c39a912c5112bf66f9a384c523bd463ec\": container with ID starting with 782748b23887009c0a0411f945266d3c39a912c5112bf66f9a384c523bd463ec not found: ID does not exist" containerID="782748b23887009c0a0411f945266d3c39a912c5112bf66f9a384c523bd463ec"
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.156596 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"782748b23887009c0a0411f945266d3c39a912c5112bf66f9a384c523bd463ec"} err="failed to get container status \"782748b23887009c0a0411f945266d3c39a912c5112bf66f9a384c523bd463ec\": rpc error: code = NotFound desc = could not find container \"782748b23887009c0a0411f945266d3c39a912c5112bf66f9a384c523bd463ec\": container with ID starting with 782748b23887009c0a0411f945266d3c39a912c5112bf66f9a384c523bd463ec not found: ID does not exist"
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.156612 4693 scope.go:117] "RemoveContainer" containerID="ccf038163934c6f5cd367d0acd7ef30f38531d6808205ff700e8a5e20f8f6f77"
Oct 08 08:14:46 crc kubenswrapper[4693]: E1008 08:14:46.156833 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccf038163934c6f5cd367d0acd7ef30f38531d6808205ff700e8a5e20f8f6f77\": container with ID starting with ccf038163934c6f5cd367d0acd7ef30f38531d6808205ff700e8a5e20f8f6f77 not found: ID does not exist" containerID="ccf038163934c6f5cd367d0acd7ef30f38531d6808205ff700e8a5e20f8f6f77"
Oct 08 08:14:46 crc kubenswrapper[4693]: I1008 08:14:46.156854 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccf038163934c6f5cd367d0acd7ef30f38531d6808205ff700e8a5e20f8f6f77"} err="failed to get container status \"ccf038163934c6f5cd367d0acd7ef30f38531d6808205ff700e8a5e20f8f6f77\": rpc error: code = NotFound desc = could not find container \"ccf038163934c6f5cd367d0acd7ef30f38531d6808205ff700e8a5e20f8f6f77\": container with ID starting with ccf038163934c6f5cd367d0acd7ef30f38531d6808205ff700e8a5e20f8f6f77 not found: ID does not exist"
Oct 08 08:14:47 crc kubenswrapper[4693]: I1008 08:14:47.375782 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9ba2b13-c93b-4a77-baba-725c6365150d" path="/var/lib/kubelet/pods/f9ba2b13-c93b-4a77-baba-725c6365150d/volumes"
Oct 08 08:14:53 crc kubenswrapper[4693]: I1008 08:14:53.489915 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 08:14:53 crc kubenswrapper[4693]: I1008 08:14:53.490703 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.188546 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"]
Oct 08 08:15:00 crc kubenswrapper[4693]: E1008 08:15:00.189571 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9ba2b13-c93b-4a77-baba-725c6365150d" containerName="extract-utilities"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.189588 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9ba2b13-c93b-4a77-baba-725c6365150d" containerName="extract-utilities"
Oct 08 08:15:00 crc kubenswrapper[4693]: E1008 08:15:00.189624 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9ba2b13-c93b-4a77-baba-725c6365150d" containerName="registry-server"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.189634 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9ba2b13-c93b-4a77-baba-725c6365150d" containerName="registry-server"
Oct 08 08:15:00 crc kubenswrapper[4693]: E1008 08:15:00.189657 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9ba2b13-c93b-4a77-baba-725c6365150d" containerName="extract-content"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.189666 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9ba2b13-c93b-4a77-baba-725c6365150d" containerName="extract-content"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.190556 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9ba2b13-c93b-4a77-baba-725c6365150d" containerName="registry-server"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.191323 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.196849 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.202660 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"]
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.204058 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.320413 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b1c99bcb-5a53-48f5-815f-93a51db4268b-config-volume\") pod \"collect-profiles-29331855-6qzqr\" (UID: \"b1c99bcb-5a53-48f5-815f-93a51db4268b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.320701 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b1c99bcb-5a53-48f5-815f-93a51db4268b-secret-volume\") pod \"collect-profiles-29331855-6qzqr\" (UID: \"b1c99bcb-5a53-48f5-815f-93a51db4268b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.320914 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs79w\" (UniqueName: \"kubernetes.io/projected/b1c99bcb-5a53-48f5-815f-93a51db4268b-kube-api-access-hs79w\") pod \"collect-profiles-29331855-6qzqr\" (UID: \"b1c99bcb-5a53-48f5-815f-93a51db4268b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.422529 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b1c99bcb-5a53-48f5-815f-93a51db4268b-secret-volume\") pod \"collect-profiles-29331855-6qzqr\" (UID: \"b1c99bcb-5a53-48f5-815f-93a51db4268b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.422625 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs79w\" (UniqueName: \"kubernetes.io/projected/b1c99bcb-5a53-48f5-815f-93a51db4268b-kube-api-access-hs79w\") pod \"collect-profiles-29331855-6qzqr\" (UID: \"b1c99bcb-5a53-48f5-815f-93a51db4268b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.422738 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b1c99bcb-5a53-48f5-815f-93a51db4268b-config-volume\") pod \"collect-profiles-29331855-6qzqr\" (UID: \"b1c99bcb-5a53-48f5-815f-93a51db4268b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.423579 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b1c99bcb-5a53-48f5-815f-93a51db4268b-config-volume\") pod \"collect-profiles-29331855-6qzqr\" (UID: \"b1c99bcb-5a53-48f5-815f-93a51db4268b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.429516 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b1c99bcb-5a53-48f5-815f-93a51db4268b-secret-volume\") pod \"collect-profiles-29331855-6qzqr\" (UID: \"b1c99bcb-5a53-48f5-815f-93a51db4268b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.444357 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs79w\" (UniqueName: \"kubernetes.io/projected/b1c99bcb-5a53-48f5-815f-93a51db4268b-kube-api-access-hs79w\") pod \"collect-profiles-29331855-6qzqr\" (UID: \"b1c99bcb-5a53-48f5-815f-93a51db4268b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:00 crc kubenswrapper[4693]: I1008 08:15:00.512209 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:01 crc kubenswrapper[4693]: I1008 08:15:01.005021 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"]
Oct 08 08:15:01 crc kubenswrapper[4693]: I1008 08:15:01.212295 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr" event={"ID":"b1c99bcb-5a53-48f5-815f-93a51db4268b","Type":"ContainerStarted","Data":"aa6afd2258b70f027b19cf956c5402b8fc8205892aa32c845d9750f257e19414"}
Oct 08 08:15:02 crc kubenswrapper[4693]: I1008 08:15:02.248023 4693 generic.go:334] "Generic (PLEG): container finished" podID="b1c99bcb-5a53-48f5-815f-93a51db4268b" containerID="c38395a4658c73a7c37260c0a1d0660ab7d8e369bf4e1b333f40d09bd3df7cfb" exitCode=0
Oct 08 08:15:02 crc kubenswrapper[4693]: I1008 08:15:02.248364 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr" event={"ID":"b1c99bcb-5a53-48f5-815f-93a51db4268b","Type":"ContainerDied","Data":"c38395a4658c73a7c37260c0a1d0660ab7d8e369bf4e1b333f40d09bd3df7cfb"}
Oct 08 08:15:03 crc kubenswrapper[4693]: I1008 08:15:03.637797 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:03 crc kubenswrapper[4693]: I1008 08:15:03.750779 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b1c99bcb-5a53-48f5-815f-93a51db4268b-config-volume\") pod \"b1c99bcb-5a53-48f5-815f-93a51db4268b\" (UID: \"b1c99bcb-5a53-48f5-815f-93a51db4268b\") "
Oct 08 08:15:03 crc kubenswrapper[4693]: I1008 08:15:03.750869 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b1c99bcb-5a53-48f5-815f-93a51db4268b-secret-volume\") pod \"b1c99bcb-5a53-48f5-815f-93a51db4268b\" (UID: \"b1c99bcb-5a53-48f5-815f-93a51db4268b\") "
Oct 08 08:15:03 crc kubenswrapper[4693]: I1008 08:15:03.751019 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hs79w\" (UniqueName: \"kubernetes.io/projected/b1c99bcb-5a53-48f5-815f-93a51db4268b-kube-api-access-hs79w\") pod \"b1c99bcb-5a53-48f5-815f-93a51db4268b\" (UID: \"b1c99bcb-5a53-48f5-815f-93a51db4268b\") "
Oct 08 08:15:03 crc kubenswrapper[4693]: I1008 08:15:03.752514 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1c99bcb-5a53-48f5-815f-93a51db4268b-config-volume" (OuterVolumeSpecName: "config-volume") pod "b1c99bcb-5a53-48f5-815f-93a51db4268b" (UID: "b1c99bcb-5a53-48f5-815f-93a51db4268b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 08 08:15:03 crc kubenswrapper[4693]: I1008 08:15:03.758753 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1c99bcb-5a53-48f5-815f-93a51db4268b-kube-api-access-hs79w" (OuterVolumeSpecName: "kube-api-access-hs79w") pod "b1c99bcb-5a53-48f5-815f-93a51db4268b" (UID: "b1c99bcb-5a53-48f5-815f-93a51db4268b"). InnerVolumeSpecName "kube-api-access-hs79w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 08 08:15:03 crc kubenswrapper[4693]: I1008 08:15:03.762046 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1c99bcb-5a53-48f5-815f-93a51db4268b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b1c99bcb-5a53-48f5-815f-93a51db4268b" (UID: "b1c99bcb-5a53-48f5-815f-93a51db4268b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 08 08:15:03 crc kubenswrapper[4693]: I1008 08:15:03.853010 4693 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b1c99bcb-5a53-48f5-815f-93a51db4268b-config-volume\") on node \"crc\" DevicePath \"\""
Oct 08 08:15:03 crc kubenswrapper[4693]: I1008 08:15:03.853051 4693 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b1c99bcb-5a53-48f5-815f-93a51db4268b-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 08 08:15:03 crc kubenswrapper[4693]: I1008 08:15:03.853060 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hs79w\" (UniqueName: \"kubernetes.io/projected/b1c99bcb-5a53-48f5-815f-93a51db4268b-kube-api-access-hs79w\") on node \"crc\" DevicePath \"\""
Oct 08 08:15:04 crc kubenswrapper[4693]: I1008 08:15:04.282884 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr" event={"ID":"b1c99bcb-5a53-48f5-815f-93a51db4268b","Type":"ContainerDied","Data":"aa6afd2258b70f027b19cf956c5402b8fc8205892aa32c845d9750f257e19414"}
Oct 08 08:15:04 crc kubenswrapper[4693]: I1008 08:15:04.282929 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa6afd2258b70f027b19cf956c5402b8fc8205892aa32c845d9750f257e19414"
Oct 08 08:15:04 crc kubenswrapper[4693]: I1008 08:15:04.282944 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331855-6qzqr"
Oct 08 08:15:04 crc kubenswrapper[4693]: I1008 08:15:04.760495 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m"]
Oct 08 08:15:04 crc kubenswrapper[4693]: I1008 08:15:04.783256 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331810-cln5m"]
Oct 08 08:15:05 crc kubenswrapper[4693]: I1008 08:15:05.384082 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c332e0a-56dc-480e-a857-62665c11bbf9" path="/var/lib/kubelet/pods/2c332e0a-56dc-480e-a857-62665c11bbf9/volumes"
Oct 08 08:15:19 crc kubenswrapper[4693]: I1008 08:15:19.974982 4693 scope.go:117] "RemoveContainer" containerID="9ea1368c3b1f4f4f6f0e7564adfd19cf07011c2063d4f1491826a5438e1f11dc"
Oct 08 08:15:23 crc kubenswrapper[4693]: I1008 08:15:23.489697 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 08:15:23 crc kubenswrapper[4693]: I1008 08:15:23.490949 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 08:15:53 crc kubenswrapper[4693]: I1008 08:15:53.489778 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 08 08:15:53 crc kubenswrapper[4693]: I1008 08:15:53.490332 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 08 08:15:53 crc kubenswrapper[4693]: I1008 08:15:53.490374 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr"
Oct 08 08:15:53 crc kubenswrapper[4693]: I1008 08:15:53.490963 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 08 08:15:53 crc kubenswrapper[4693]: I1008 08:15:53.491015 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" gracePeriod=600
Oct 08 08:15:53 crc kubenswrapper[4693]: E1008 08:15:53.631754 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 08:15:53 crc kubenswrapper[4693]: I1008 08:15:53.818186 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" exitCode=0
Oct 08 08:15:53 crc kubenswrapper[4693]: I1008 08:15:53.818234 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b"}
Oct 08 08:15:53 crc kubenswrapper[4693]: I1008 08:15:53.818271 4693 scope.go:117] "RemoveContainer" containerID="6c7bb5f0ad5db721978259349bf27e3d7356eacafd118f6db51f957705571e0c"
Oct 08 08:15:53 crc kubenswrapper[4693]: I1008 08:15:53.819147 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b"
Oct 08 08:15:53 crc kubenswrapper[4693]: E1008 08:15:53.819760 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 08:16:02 crc kubenswrapper[4693]: I1008 08:16:02.936872 4693 generic.go:334] "Generic (PLEG): container finished" podID="125016ff-a340-49c8-8c6f-9eed2093e1af" containerID="fc2e2ef8b6be8325119cd78ac696438300077e42d809d9b2fe050f34b65695d9" exitCode=0
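"back-off 5m0s restarting failed container" above is kubelet's crash-loop delay at its ceiling: the wait between restart attempts starts small and doubles per restart until it is capped at five minutes. A minimal sketch of that policy, assuming the usual defaults (10s base, factor 2, 5m cap); this is an illustration, not kubelet's actual backoff code.

package main

import (
	"fmt"
	"time"
)

// crashLoopDelay returns the wait before the next restart attempt after the
// given number of consecutive failed restarts, capped at five minutes.
func crashLoopDelay(restarts int) time.Duration {
	d := 10 * time.Second // assumed base delay
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= 5*time.Minute {
			return 5 * time.Minute // the "back-off 5m0s" seen in the log
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> wait %v\n", r, crashLoopDelay(r))
	}
}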
containerID="fc2e2ef8b6be8325119cd78ac696438300077e42d809d9b2fe050f34b65695d9" exitCode=0 Oct 08 08:16:02 crc kubenswrapper[4693]: I1008 08:16:02.936976 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"125016ff-a340-49c8-8c6f-9eed2093e1af","Type":"ContainerDied","Data":"fc2e2ef8b6be8325119cd78ac696438300077e42d809d9b2fe050f34b65695d9"} Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.350253 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.514299 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/125016ff-a340-49c8-8c6f-9eed2093e1af-config-data\") pod \"125016ff-a340-49c8-8c6f-9eed2093e1af\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.514357 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/125016ff-a340-49c8-8c6f-9eed2093e1af-openstack-config\") pod \"125016ff-a340-49c8-8c6f-9eed2093e1af\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.514441 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-ca-certs\") pod \"125016ff-a340-49c8-8c6f-9eed2093e1af\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.514457 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-openstack-config-secret\") pod \"125016ff-a340-49c8-8c6f-9eed2093e1af\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.514507 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"125016ff-a340-49c8-8c6f-9eed2093e1af\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.514567 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-ssh-key\") pod \"125016ff-a340-49c8-8c6f-9eed2093e1af\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.514630 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/125016ff-a340-49c8-8c6f-9eed2093e1af-test-operator-ephemeral-workdir\") pod \"125016ff-a340-49c8-8c6f-9eed2093e1af\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.514679 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/125016ff-a340-49c8-8c6f-9eed2093e1af-test-operator-ephemeral-temporary\") pod \"125016ff-a340-49c8-8c6f-9eed2093e1af\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.514705 4693 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-hb9pq\" (UniqueName: \"kubernetes.io/projected/125016ff-a340-49c8-8c6f-9eed2093e1af-kube-api-access-hb9pq\") pod \"125016ff-a340-49c8-8c6f-9eed2093e1af\" (UID: \"125016ff-a340-49c8-8c6f-9eed2093e1af\") " Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.515639 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/125016ff-a340-49c8-8c6f-9eed2093e1af-config-data" (OuterVolumeSpecName: "config-data") pod "125016ff-a340-49c8-8c6f-9eed2093e1af" (UID: "125016ff-a340-49c8-8c6f-9eed2093e1af"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.517744 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/125016ff-a340-49c8-8c6f-9eed2093e1af-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "125016ff-a340-49c8-8c6f-9eed2093e1af" (UID: "125016ff-a340-49c8-8c6f-9eed2093e1af"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.522274 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/125016ff-a340-49c8-8c6f-9eed2093e1af-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "125016ff-a340-49c8-8c6f-9eed2093e1af" (UID: "125016ff-a340-49c8-8c6f-9eed2093e1af"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.525095 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "test-operator-logs") pod "125016ff-a340-49c8-8c6f-9eed2093e1af" (UID: "125016ff-a340-49c8-8c6f-9eed2093e1af"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.537796 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/125016ff-a340-49c8-8c6f-9eed2093e1af-kube-api-access-hb9pq" (OuterVolumeSpecName: "kube-api-access-hb9pq") pod "125016ff-a340-49c8-8c6f-9eed2093e1af" (UID: "125016ff-a340-49c8-8c6f-9eed2093e1af"). InnerVolumeSpecName "kube-api-access-hb9pq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.564069 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "125016ff-a340-49c8-8c6f-9eed2093e1af" (UID: "125016ff-a340-49c8-8c6f-9eed2093e1af"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.575629 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "125016ff-a340-49c8-8c6f-9eed2093e1af" (UID: "125016ff-a340-49c8-8c6f-9eed2093e1af"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.576103 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "125016ff-a340-49c8-8c6f-9eed2093e1af" (UID: "125016ff-a340-49c8-8c6f-9eed2093e1af"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.593964 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/125016ff-a340-49c8-8c6f-9eed2093e1af-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "125016ff-a340-49c8-8c6f-9eed2093e1af" (UID: "125016ff-a340-49c8-8c6f-9eed2093e1af"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.617753 4693 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.617795 4693 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/125016ff-a340-49c8-8c6f-9eed2093e1af-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.617832 4693 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/125016ff-a340-49c8-8c6f-9eed2093e1af-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.617853 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb9pq\" (UniqueName: \"kubernetes.io/projected/125016ff-a340-49c8-8c6f-9eed2093e1af-kube-api-access-hb9pq\") on node \"crc\" DevicePath \"\"" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.617871 4693 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/125016ff-a340-49c8-8c6f-9eed2093e1af-config-data\") on node \"crc\" DevicePath \"\"" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.617888 4693 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/125016ff-a340-49c8-8c6f-9eed2093e1af-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.617971 4693 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.618025 4693 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/125016ff-a340-49c8-8c6f-9eed2093e1af-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.618078 4693 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.645616 4693 operation_generator.go:917] UnmountDevice succeeded for 
volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.720574 4693 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.966441 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"125016ff-a340-49c8-8c6f-9eed2093e1af","Type":"ContainerDied","Data":"2fd3b4acfc02b790eb1306de225c05f442235f7485a858e3312ca5597dc63c33"} Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.966790 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fd3b4acfc02b790eb1306de225c05f442235f7485a858e3312ca5597dc63c33" Oct 08 08:16:04 crc kubenswrapper[4693]: I1008 08:16:04.966661 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 08 08:16:06 crc kubenswrapper[4693]: I1008 08:16:06.363638 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:16:06 crc kubenswrapper[4693]: E1008 08:16:06.364117 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.471404 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 08 08:16:12 crc kubenswrapper[4693]: E1008 08:16:12.472934 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="125016ff-a340-49c8-8c6f-9eed2093e1af" containerName="tempest-tests-tempest-tests-runner" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.472966 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="125016ff-a340-49c8-8c6f-9eed2093e1af" containerName="tempest-tests-tempest-tests-runner" Oct 08 08:16:12 crc kubenswrapper[4693]: E1008 08:16:12.473029 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1c99bcb-5a53-48f5-815f-93a51db4268b" containerName="collect-profiles" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.473046 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1c99bcb-5a53-48f5-815f-93a51db4268b" containerName="collect-profiles" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.473477 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1c99bcb-5a53-48f5-815f-93a51db4268b" containerName="collect-profiles" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.473536 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="125016ff-a340-49c8-8c6f-9eed2093e1af" containerName="tempest-tests-tempest-tests-runner" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.474938 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.479986 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mpx49" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.490392 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.577767 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh47k\" (UniqueName: \"kubernetes.io/projected/fed84ccb-a071-4df4-a6d4-1e5c227a609c-kube-api-access-lh47k\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"fed84ccb-a071-4df4-a6d4-1e5c227a609c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.578217 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"fed84ccb-a071-4df4-a6d4-1e5c227a609c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.679884 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh47k\" (UniqueName: \"kubernetes.io/projected/fed84ccb-a071-4df4-a6d4-1e5c227a609c-kube-api-access-lh47k\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"fed84ccb-a071-4df4-a6d4-1e5c227a609c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.679958 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"fed84ccb-a071-4df4-a6d4-1e5c227a609c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.680548 4693 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"fed84ccb-a071-4df4-a6d4-1e5c227a609c\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.706696 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh47k\" (UniqueName: \"kubernetes.io/projected/fed84ccb-a071-4df4-a6d4-1e5c227a609c-kube-api-access-lh47k\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"fed84ccb-a071-4df4-a6d4-1e5c227a609c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 08 08:16:12 crc kubenswrapper[4693]: I1008 08:16:12.710804 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"fed84ccb-a071-4df4-a6d4-1e5c227a609c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 08 08:16:12 crc 
kubenswrapper[4693]: I1008 08:16:12.814035 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 08 08:16:13 crc kubenswrapper[4693]: I1008 08:16:13.305465 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 08 08:16:13 crc kubenswrapper[4693]: I1008 08:16:13.318152 4693 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 08 08:16:14 crc kubenswrapper[4693]: I1008 08:16:14.070169 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"fed84ccb-a071-4df4-a6d4-1e5c227a609c","Type":"ContainerStarted","Data":"89a5306b8f5e682688b8cee24981207150c7c405bcb906f2fa34d99ebf3cc688"} Oct 08 08:16:15 crc kubenswrapper[4693]: I1008 08:16:15.087908 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"fed84ccb-a071-4df4-a6d4-1e5c227a609c","Type":"ContainerStarted","Data":"8919113487cda64506c5500fbe920ff7593e3c5b51a4cc33327d1db8264fdcdd"} Oct 08 08:16:15 crc kubenswrapper[4693]: I1008 08:16:15.120860 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.149094052 podStartE2EDuration="3.120797463s" podCreationTimestamp="2025-10-08 08:16:12 +0000 UTC" firstStartedPulling="2025-10-08 08:16:13.317977848 +0000 UTC m=+3558.688942783" lastFinishedPulling="2025-10-08 08:16:14.289681219 +0000 UTC m=+3559.660646194" observedRunningTime="2025-10-08 08:16:15.104907966 +0000 UTC m=+3560.475872911" watchObservedRunningTime="2025-10-08 08:16:15.120797463 +0000 UTC m=+3560.491762428" Oct 08 08:16:20 crc kubenswrapper[4693]: I1008 08:16:20.362876 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:16:20 crc kubenswrapper[4693]: E1008 08:16:20.364020 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.030354 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nth5x/must-gather-7r9gb"] Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.032211 4693 util.go:30] "No sandbox for pod can be found. 
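The pod_startup_latency_tracker entry above reports two durations for the same pod: podStartE2EDuration (creation to observed running) and an image-pull window bounded by firstStartedPulling/lastFinishedPulling. The SLO figure is the e2e duration minus that pull window, which the numbers in the entry bear out. A minimal Go sketch recomputing it from the monotonic m=+ offsets quoted in the entry (an illustrative check of the arithmetic, not kubelet code):

package main

import "fmt"

func main() {
	// Monotonic m=+ offsets (seconds since process start) copied from the
	// tracker entry for test-operator-logs-pod-tempest-tempest-tests-tempest.
	firstStartedPulling := 3558.688942783
	lastFinishedPulling := 3559.660646194
	podStartE2E := 3.120797463 // podStartE2EDuration

	pullWindow := lastFinishedPulling - firstStartedPulling // ≈ 0.971703411s spent pulling the image
	slo := podStartE2E - pullWindow                         // pull time is excluded from the SLO metric

	fmt.Printf("podStartSLOduration = %.9f\n", slo) // ≈ 2.149094052, matching the logged value
}

The same identity holds for the must-gather-7r9gb tracker entry further down: 5.354372671s minus its 3.803871611s pull window gives the logged 1.55050106.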
Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.035621 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-nth5x"/"openshift-service-ca.crt"
Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.036044 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-nth5x"/"kube-root-ca.crt"
Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.057691 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-nth5x/must-gather-7r9gb"]
Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.100400 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ac648e9b-4eb4-4f0c-a108-f103f385b3f8-must-gather-output\") pod \"must-gather-7r9gb\" (UID: \"ac648e9b-4eb4-4f0c-a108-f103f385b3f8\") " pod="openshift-must-gather-nth5x/must-gather-7r9gb"
Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.100613 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8brlx\" (UniqueName: \"kubernetes.io/projected/ac648e9b-4eb4-4f0c-a108-f103f385b3f8-kube-api-access-8brlx\") pod \"must-gather-7r9gb\" (UID: \"ac648e9b-4eb4-4f0c-a108-f103f385b3f8\") " pod="openshift-must-gather-nth5x/must-gather-7r9gb"
Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.202886 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ac648e9b-4eb4-4f0c-a108-f103f385b3f8-must-gather-output\") pod \"must-gather-7r9gb\" (UID: \"ac648e9b-4eb4-4f0c-a108-f103f385b3f8\") " pod="openshift-must-gather-nth5x/must-gather-7r9gb"
Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.203263 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8brlx\" (UniqueName: \"kubernetes.io/projected/ac648e9b-4eb4-4f0c-a108-f103f385b3f8-kube-api-access-8brlx\") pod \"must-gather-7r9gb\" (UID: \"ac648e9b-4eb4-4f0c-a108-f103f385b3f8\") " pod="openshift-must-gather-nth5x/must-gather-7r9gb"
Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.203917 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ac648e9b-4eb4-4f0c-a108-f103f385b3f8-must-gather-output\") pod \"must-gather-7r9gb\" (UID: \"ac648e9b-4eb4-4f0c-a108-f103f385b3f8\") " pod="openshift-must-gather-nth5x/must-gather-7r9gb"
Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.240801 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8brlx\" (UniqueName: \"kubernetes.io/projected/ac648e9b-4eb4-4f0c-a108-f103f385b3f8-kube-api-access-8brlx\") pod \"must-gather-7r9gb\" (UID: \"ac648e9b-4eb4-4f0c-a108-f103f385b3f8\") " pod="openshift-must-gather-nth5x/must-gather-7r9gb"
Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.353037 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nth5x/must-gather-7r9gb"
Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.363010 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b"
Oct 08 08:16:32 crc kubenswrapper[4693]: E1008 08:16:32.363242 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 08:16:32 crc kubenswrapper[4693]: I1008 08:16:32.806294 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-nth5x/must-gather-7r9gb"]
Oct 08 08:16:32 crc kubenswrapper[4693]: W1008 08:16:32.807246 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac648e9b_4eb4_4f0c_a108_f103f385b3f8.slice/crio-3acc527cd21f8500bbc2018df7c3d53a940d17adb1eb8b2b28e1eb5070c9603e WatchSource:0}: Error finding container 3acc527cd21f8500bbc2018df7c3d53a940d17adb1eb8b2b28e1eb5070c9603e: Status 404 returned error can't find the container with id 3acc527cd21f8500bbc2018df7c3d53a940d17adb1eb8b2b28e1eb5070c9603e
Oct 08 08:16:33 crc kubenswrapper[4693]: I1008 08:16:33.292189 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nth5x/must-gather-7r9gb" event={"ID":"ac648e9b-4eb4-4f0c-a108-f103f385b3f8","Type":"ContainerStarted","Data":"3acc527cd21f8500bbc2018df7c3d53a940d17adb1eb8b2b28e1eb5070c9603e"}
Oct 08 08:16:37 crc kubenswrapper[4693]: I1008 08:16:37.335087 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nth5x/must-gather-7r9gb" event={"ID":"ac648e9b-4eb4-4f0c-a108-f103f385b3f8","Type":"ContainerStarted","Data":"2fd2c11d873009e88ac755b29de042f07bec9d09dd6c299751f7f07787ffd4fd"}
Oct 08 08:16:37 crc kubenswrapper[4693]: I1008 08:16:37.335758 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nth5x/must-gather-7r9gb" event={"ID":"ac648e9b-4eb4-4f0c-a108-f103f385b3f8","Type":"ContainerStarted","Data":"80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4"}
Oct 08 08:16:37 crc kubenswrapper[4693]: I1008 08:16:37.354393 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-nth5x/must-gather-7r9gb" podStartSLOduration=1.55050106 podStartE2EDuration="5.354372671s" podCreationTimestamp="2025-10-08 08:16:32 +0000 UTC" firstStartedPulling="2025-10-08 08:16:32.809613083 +0000 UTC m=+3578.180578028" lastFinishedPulling="2025-10-08 08:16:36.613484704 +0000 UTC m=+3581.984449639" observedRunningTime="2025-10-08 08:16:37.351139646 +0000 UTC m=+3582.722104591" watchObservedRunningTime="2025-10-08 08:16:37.354372671 +0000 UTC m=+3582.725337606"
Oct 08 08:16:40 crc kubenswrapper[4693]: I1008 08:16:40.504087 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nth5x/crc-debug-vnsbh"]
Oct 08 08:16:40 crc kubenswrapper[4693]: I1008 08:16:40.505567 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nth5x/crc-debug-vnsbh"
Oct 08 08:16:40 crc kubenswrapper[4693]: I1008 08:16:40.510521 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-nth5x"/"default-dockercfg-72qn5"
Oct 08 08:16:40 crc kubenswrapper[4693]: I1008 08:16:40.580741 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg2tt\" (UniqueName: \"kubernetes.io/projected/8e32391b-049b-46ce-beb4-2a08fb1c6817-kube-api-access-lg2tt\") pod \"crc-debug-vnsbh\" (UID: \"8e32391b-049b-46ce-beb4-2a08fb1c6817\") " pod="openshift-must-gather-nth5x/crc-debug-vnsbh"
Oct 08 08:16:40 crc kubenswrapper[4693]: I1008 08:16:40.580911 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e32391b-049b-46ce-beb4-2a08fb1c6817-host\") pod \"crc-debug-vnsbh\" (UID: \"8e32391b-049b-46ce-beb4-2a08fb1c6817\") " pod="openshift-must-gather-nth5x/crc-debug-vnsbh"
Oct 08 08:16:40 crc kubenswrapper[4693]: I1008 08:16:40.682245 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e32391b-049b-46ce-beb4-2a08fb1c6817-host\") pod \"crc-debug-vnsbh\" (UID: \"8e32391b-049b-46ce-beb4-2a08fb1c6817\") " pod="openshift-must-gather-nth5x/crc-debug-vnsbh"
Oct 08 08:16:40 crc kubenswrapper[4693]: I1008 08:16:40.682396 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg2tt\" (UniqueName: \"kubernetes.io/projected/8e32391b-049b-46ce-beb4-2a08fb1c6817-kube-api-access-lg2tt\") pod \"crc-debug-vnsbh\" (UID: \"8e32391b-049b-46ce-beb4-2a08fb1c6817\") " pod="openshift-must-gather-nth5x/crc-debug-vnsbh"
Oct 08 08:16:40 crc kubenswrapper[4693]: I1008 08:16:40.682434 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e32391b-049b-46ce-beb4-2a08fb1c6817-host\") pod \"crc-debug-vnsbh\" (UID: \"8e32391b-049b-46ce-beb4-2a08fb1c6817\") " pod="openshift-must-gather-nth5x/crc-debug-vnsbh"
Oct 08 08:16:40 crc kubenswrapper[4693]: I1008 08:16:40.707458 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg2tt\" (UniqueName: \"kubernetes.io/projected/8e32391b-049b-46ce-beb4-2a08fb1c6817-kube-api-access-lg2tt\") pod \"crc-debug-vnsbh\" (UID: \"8e32391b-049b-46ce-beb4-2a08fb1c6817\") " pod="openshift-must-gather-nth5x/crc-debug-vnsbh"
Oct 08 08:16:40 crc kubenswrapper[4693]: I1008 08:16:40.826556 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nth5x/crc-debug-vnsbh"
Oct 08 08:16:40 crc kubenswrapper[4693]: W1008 08:16:40.899087 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e32391b_049b_46ce_beb4_2a08fb1c6817.slice/crio-be2339ab4ccf7cc1fcabdca1589f2ff6898db237d05723e142de0b93aaa18e9e WatchSource:0}: Error finding container be2339ab4ccf7cc1fcabdca1589f2ff6898db237d05723e142de0b93aaa18e9e: Status 404 returned error can't find the container with id be2339ab4ccf7cc1fcabdca1589f2ff6898db237d05723e142de0b93aaa18e9e
Oct 08 08:16:41 crc kubenswrapper[4693]: I1008 08:16:41.381650 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nth5x/crc-debug-vnsbh" event={"ID":"8e32391b-049b-46ce-beb4-2a08fb1c6817","Type":"ContainerStarted","Data":"be2339ab4ccf7cc1fcabdca1589f2ff6898db237d05723e142de0b93aaa18e9e"}
Oct 08 08:16:46 crc kubenswrapper[4693]: I1008 08:16:46.363453 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b"
Oct 08 08:16:46 crc kubenswrapper[4693]: E1008 08:16:46.364300 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 08:16:51 crc kubenswrapper[4693]: I1008 08:16:51.487907 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nth5x/crc-debug-vnsbh" event={"ID":"8e32391b-049b-46ce-beb4-2a08fb1c6817","Type":"ContainerStarted","Data":"76d84a07018901c6349af1c520ed7155f8bdb4b872a22d8a26222a3aec6e45b4"}
Oct 08 08:16:51 crc kubenswrapper[4693]: I1008 08:16:51.508471 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-nth5x/crc-debug-vnsbh" podStartSLOduration=1.30263672 podStartE2EDuration="11.508445292s" podCreationTimestamp="2025-10-08 08:16:40 +0000 UTC" firstStartedPulling="2025-10-08 08:16:40.903193951 +0000 UTC m=+3586.274158886" lastFinishedPulling="2025-10-08 08:16:51.109002523 +0000 UTC m=+3596.479967458" observedRunningTime="2025-10-08 08:16:51.501335265 +0000 UTC m=+3596.872300210" watchObservedRunningTime="2025-10-08 08:16:51.508445292 +0000 UTC m=+3596.879410237"
Oct 08 08:16:59 crc kubenswrapper[4693]: I1008 08:16:59.363223 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b"
Oct 08 08:16:59 crc kubenswrapper[4693]: E1008 08:16:59.364175 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 08:17:10 crc kubenswrapper[4693]: I1008 08:17:10.362526 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b"
Oct 08 08:17:10 crc kubenswrapper[4693]: E1008 08:17:10.364325 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
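The machine-config-daemon pair above (a "RemoveContainer" line followed by the CrashLoopBackOff error) has been recurring every 11 to 14 seconds since 08:16:06 and continues below; that cadence is the pod worker re-syncing, while "back-off 5m0s" means the container's restart delay has already reached kubelet's cap, so each attempt is refused until the five-minute window expires. A short sketch of the delay schedule, assuming kubelet's usual doubling policy (10s initial delay, factor 2, capped at the 5m0s named in the message; illustrative, not kubelet source):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed policy: first restart after 10s, doubling per crash, 5m cap.
	delay := 10 * time.Second
	const maxDelay = 5 * time.Minute
	for restart := 1; restart <= 8; restart++ {
		fmt.Printf("restart %d: wait %v\n", restart, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
	// Prints 10s 20s 40s 1m20s 2m40s 5m0s 5m0s 5m0s: from the sixth restart
	// on, every retry waits the full 5m0s, which is why the same error line
	// recurs for minutes on end in this log.
}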
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:17:23 crc kubenswrapper[4693]: I1008 08:17:23.364084 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:17:23 crc kubenswrapper[4693]: E1008 08:17:23.364710 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:17:35 crc kubenswrapper[4693]: I1008 08:17:35.367963 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:17:35 crc kubenswrapper[4693]: E1008 08:17:35.368950 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:17:40 crc kubenswrapper[4693]: I1008 08:17:40.390953 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-78f9f7b5dd-wdpgb_bae6b42f-5a3c-4568-b8db-84be1514827e/barbican-api/0.log" Oct 08 08:17:40 crc kubenswrapper[4693]: I1008 08:17:40.395923 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-78f9f7b5dd-wdpgb_bae6b42f-5a3c-4568-b8db-84be1514827e/barbican-api-log/0.log" Oct 08 08:17:40 crc kubenswrapper[4693]: I1008 08:17:40.595444 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7d588c7bd-f7dbq_f3bebe35-d072-4368-ba55-d8415a4f44ef/barbican-keystone-listener/0.log" Oct 08 08:17:40 crc kubenswrapper[4693]: I1008 08:17:40.682182 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7d588c7bd-f7dbq_f3bebe35-d072-4368-ba55-d8415a4f44ef/barbican-keystone-listener-log/0.log" Oct 08 08:17:40 crc kubenswrapper[4693]: I1008 08:17:40.834349 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-69584f958c-5wpzz_f74783b1-e062-4c4f-82eb-a7df2387913d/barbican-worker/0.log" Oct 08 08:17:40 crc kubenswrapper[4693]: I1008 08:17:40.869948 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-69584f958c-5wpzz_f74783b1-e062-4c4f-82eb-a7df2387913d/barbican-worker-log/0.log" Oct 08 08:17:41 crc kubenswrapper[4693]: I1008 08:17:41.076341 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz_ac6cb698-ba08-46e2-a8ae-557f656d3209/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:41 crc kubenswrapper[4693]: I1008 08:17:41.260703 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_68fc1107-d02a-4138-bd74-648778e9302d/ceilometer-central-agent/0.log" Oct 08 08:17:41 crc kubenswrapper[4693]: I1008 08:17:41.286441 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_68fc1107-d02a-4138-bd74-648778e9302d/ceilometer-notification-agent/0.log" Oct 08 08:17:41 crc kubenswrapper[4693]: I1008 08:17:41.352879 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_68fc1107-d02a-4138-bd74-648778e9302d/proxy-httpd/0.log" Oct 08 08:17:41 crc kubenswrapper[4693]: I1008 08:17:41.477504 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_68fc1107-d02a-4138-bd74-648778e9302d/sg-core/0.log" Oct 08 08:17:41 crc kubenswrapper[4693]: I1008 08:17:41.619037 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ebd0e852-8dca-49c3-9af2-00f4d652216e/cinder-api/0.log" Oct 08 08:17:41 crc kubenswrapper[4693]: I1008 08:17:41.688585 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ebd0e852-8dca-49c3-9af2-00f4d652216e/cinder-api-log/0.log" Oct 08 08:17:41 crc kubenswrapper[4693]: I1008 08:17:41.947703 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_3b12dbfa-195c-43ea-ae2b-267a8733add4/cinder-scheduler/0.log" Oct 08 08:17:42 crc kubenswrapper[4693]: I1008 08:17:42.095570 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_3b12dbfa-195c-43ea-ae2b-267a8733add4/probe/0.log" Oct 08 08:17:42 crc kubenswrapper[4693]: I1008 08:17:42.198925 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-727b5_2faa8ca0-e93e-4532-bf6c-00f2064bf177/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:42 crc kubenswrapper[4693]: I1008 08:17:42.297439 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd_eecc70b9-2687-499e-89e1-f2346e8088f6/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:42 crc kubenswrapper[4693]: I1008 08:17:42.599529 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-xf42b_06bebec7-3818-42dc-b357-7ef2ea40a463/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:42 crc kubenswrapper[4693]: I1008 08:17:42.726656 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-2nfsx_77ca5c51-5f17-4793-897e-235a54c041c2/init/0.log" Oct 08 08:17:42 crc kubenswrapper[4693]: I1008 08:17:42.955785 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-2nfsx_77ca5c51-5f17-4793-897e-235a54c041c2/init/0.log" Oct 08 08:17:42 crc kubenswrapper[4693]: I1008 08:17:42.972468 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-2nfsx_77ca5c51-5f17-4793-897e-235a54c041c2/dnsmasq-dns/0.log" Oct 08 08:17:43 crc kubenswrapper[4693]: I1008 08:17:43.180084 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-brvlv_a7a301e7-dfc4-47d6-acf1-f34b19e1e13a/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:43 crc kubenswrapper[4693]: I1008 08:17:43.259298 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_06c30f6d-189d-4e3e-98f3-156a7784963c/glance-httpd/0.log" Oct 08 08:17:43 crc kubenswrapper[4693]: I1008 08:17:43.375940 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_06c30f6d-189d-4e3e-98f3-156a7784963c/glance-log/0.log" Oct 08 08:17:43 crc kubenswrapper[4693]: I1008 08:17:43.466172 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_d34c5891-320c-402e-9ee6-0f75ba7e2bbb/glance-httpd/0.log" Oct 08 08:17:43 crc kubenswrapper[4693]: I1008 08:17:43.622350 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_d34c5891-320c-402e-9ee6-0f75ba7e2bbb/glance-log/0.log" Oct 08 08:17:43 crc kubenswrapper[4693]: I1008 08:17:43.730430 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-786b4cdb4-z6p8n_1f26734d-12eb-4c6c-9e68-254a30cea3b6/horizon/0.log" Oct 08 08:17:43 crc kubenswrapper[4693]: I1008 08:17:43.958318 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp_36d7c0e2-4414-4f5d-ace2-37e627b6e330/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:44 crc kubenswrapper[4693]: I1008 08:17:44.124486 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-786b4cdb4-z6p8n_1f26734d-12eb-4c6c-9e68-254a30cea3b6/horizon-log/0.log" Oct 08 08:17:44 crc kubenswrapper[4693]: I1008 08:17:44.192636 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-8r5hs_e8c4dd95-f13a-4479-99a8-9ea12766ac48/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:44 crc kubenswrapper[4693]: I1008 08:17:44.358223 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5bbc7cbf94-5tkqs_e4dc4d60-5d83-4f09-986c-a394c44788b5/keystone-api/0.log" Oct 08 08:17:44 crc kubenswrapper[4693]: I1008 08:17:44.406396 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29331841-z6dhr_0e12eea6-ac5f-47c8-810b-b304ee039431/keystone-cron/0.log" Oct 08 08:17:44 crc kubenswrapper[4693]: I1008 08:17:44.522151 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_028998c5-3dec-46de-a5bb-bc5855df099e/kube-state-metrics/0.log" Oct 08 08:17:44 crc kubenswrapper[4693]: I1008 08:17:44.639801 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk_014202b7-db23-455e-ba57-d12d3b6e2975/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:44 crc kubenswrapper[4693]: I1008 08:17:44.946422 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-77bfdd5769-m42ll_e8c282e4-9865-41ec-922f-86d322b60ea0/neutron-api/0.log" Oct 08 08:17:44 crc kubenswrapper[4693]: I1008 08:17:44.987544 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-77bfdd5769-m42ll_e8c282e4-9865-41ec-922f-86d322b60ea0/neutron-httpd/0.log" Oct 08 08:17:45 crc kubenswrapper[4693]: I1008 08:17:45.235522 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc_0d3eae26-e892-4687-bd4c-4cbd1a566e56/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:45 crc kubenswrapper[4693]: I1008 08:17:45.736694 4693 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_nova-api-0_106dc2ce-316f-4e4e-a87c-ada5021fea4b/nova-api-log/0.log" Oct 08 08:17:45 crc kubenswrapper[4693]: I1008 08:17:45.871718 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_bf23439a-4d07-4711-9190-3fce06bdf2e4/nova-cell0-conductor-conductor/0.log" Oct 08 08:17:45 crc kubenswrapper[4693]: I1008 08:17:45.968515 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_106dc2ce-316f-4e4e-a87c-ada5021fea4b/nova-api-api/0.log" Oct 08 08:17:46 crc kubenswrapper[4693]: I1008 08:17:46.170650 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_2b15b479-b3e9-4af4-bb60-3f6ca0ed053e/nova-cell1-conductor-conductor/0.log" Oct 08 08:17:46 crc kubenswrapper[4693]: I1008 08:17:46.320304 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_532a7d37-716c-43b9-b417-8f9ab3ed3dcf/nova-cell1-novncproxy-novncproxy/0.log" Oct 08 08:17:46 crc kubenswrapper[4693]: I1008 08:17:46.539910 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-n87pd_14d12cdc-edb4-47c1-b245-b95cb21067bd/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:46 crc kubenswrapper[4693]: I1008 08:17:46.736764 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_22aa81a6-83fc-4751-aa3b-c77361db77c0/nova-metadata-log/0.log" Oct 08 08:17:47 crc kubenswrapper[4693]: I1008 08:17:47.126183 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_4fcb903f-8f89-47a5-b120-a3e8daaaa2ae/nova-scheduler-scheduler/0.log" Oct 08 08:17:47 crc kubenswrapper[4693]: I1008 08:17:47.248660 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c4cf2123-362f-4d9f-8080-bd9d6e13de17/mysql-bootstrap/0.log" Oct 08 08:17:47 crc kubenswrapper[4693]: I1008 08:17:47.439260 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c4cf2123-362f-4d9f-8080-bd9d6e13de17/mysql-bootstrap/0.log" Oct 08 08:17:47 crc kubenswrapper[4693]: I1008 08:17:47.479497 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c4cf2123-362f-4d9f-8080-bd9d6e13de17/galera/0.log" Oct 08 08:17:47 crc kubenswrapper[4693]: I1008 08:17:47.773935 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_1aa3187a-5fce-4486-a846-709a6231383f/mysql-bootstrap/0.log" Oct 08 08:17:47 crc kubenswrapper[4693]: I1008 08:17:47.984318 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_1aa3187a-5fce-4486-a846-709a6231383f/galera/0.log" Oct 08 08:17:47 crc kubenswrapper[4693]: I1008 08:17:47.996068 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_22aa81a6-83fc-4751-aa3b-c77361db77c0/nova-metadata-metadata/0.log" Oct 08 08:17:48 crc kubenswrapper[4693]: I1008 08:17:48.032774 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_1aa3187a-5fce-4486-a846-709a6231383f/mysql-bootstrap/0.log" Oct 08 08:17:48 crc kubenswrapper[4693]: I1008 08:17:48.213588 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_de8f5998-6e3d-4695-affe-f3afab2d2528/openstackclient/0.log" Oct 08 08:17:48 crc kubenswrapper[4693]: I1008 08:17:48.497234 4693 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ovn-controller-ovs-5mdkq_391f1e55-ea6a-4d2f-ae2a-08adfad94698/ovsdb-server-init/0.log" Oct 08 08:17:48 crc kubenswrapper[4693]: I1008 08:17:48.528258 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-47xpx_dd6332ac-70b3-4137-9419-3d394f270aa3/openstack-network-exporter/0.log" Oct 08 08:17:48 crc kubenswrapper[4693]: I1008 08:17:48.664322 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5mdkq_391f1e55-ea6a-4d2f-ae2a-08adfad94698/ovs-vswitchd/0.log" Oct 08 08:17:48 crc kubenswrapper[4693]: I1008 08:17:48.702768 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5mdkq_391f1e55-ea6a-4d2f-ae2a-08adfad94698/ovsdb-server-init/0.log" Oct 08 08:17:48 crc kubenswrapper[4693]: I1008 08:17:48.735791 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5mdkq_391f1e55-ea6a-4d2f-ae2a-08adfad94698/ovsdb-server/0.log" Oct 08 08:17:48 crc kubenswrapper[4693]: I1008 08:17:48.967755 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-qmltj_5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d/ovn-controller/0.log" Oct 08 08:17:49 crc kubenswrapper[4693]: I1008 08:17:49.231458 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-ncmdw_2715cea9-fa27-469b-988a-338c5b80f62d/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:49 crc kubenswrapper[4693]: I1008 08:17:49.247013 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e01f071e-63e7-4a6e-b321-5f489621b814/openstack-network-exporter/0.log" Oct 08 08:17:49 crc kubenswrapper[4693]: I1008 08:17:49.363734 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:17:49 crc kubenswrapper[4693]: E1008 08:17:49.363973 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:17:49 crc kubenswrapper[4693]: I1008 08:17:49.481720 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_02c7bbbf-7730-4f24-b131-92411b14dcb0/openstack-network-exporter/0.log" Oct 08 08:17:49 crc kubenswrapper[4693]: I1008 08:17:49.507507 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e01f071e-63e7-4a6e-b321-5f489621b814/ovn-northd/0.log" Oct 08 08:17:49 crc kubenswrapper[4693]: I1008 08:17:49.739148 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5d809602-04d2-4d1f-b024-30fecd9b2256/openstack-network-exporter/0.log" Oct 08 08:17:49 crc kubenswrapper[4693]: I1008 08:17:49.746431 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_02c7bbbf-7730-4f24-b131-92411b14dcb0/ovsdbserver-nb/0.log" Oct 08 08:17:49 crc kubenswrapper[4693]: I1008 08:17:49.958862 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5d809602-04d2-4d1f-b024-30fecd9b2256/ovsdbserver-sb/0.log" Oct 08 08:17:50 crc kubenswrapper[4693]: I1008 08:17:50.036947 4693 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-84d7c4f8cb-75jz5_7285e65f-f435-4b74-8019-c5acad9b74c7/placement-api/0.log" Oct 08 08:17:50 crc kubenswrapper[4693]: I1008 08:17:50.229911 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-84d7c4f8cb-75jz5_7285e65f-f435-4b74-8019-c5acad9b74c7/placement-log/0.log" Oct 08 08:17:50 crc kubenswrapper[4693]: I1008 08:17:50.295457 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a/setup-container/0.log" Oct 08 08:17:50 crc kubenswrapper[4693]: I1008 08:17:50.477309 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a/setup-container/0.log" Oct 08 08:17:50 crc kubenswrapper[4693]: I1008 08:17:50.529787 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a/rabbitmq/0.log" Oct 08 08:17:50 crc kubenswrapper[4693]: I1008 08:17:50.695775 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c40b5a1f-c5fc-4885-9816-b7b2cfc98423/setup-container/0.log" Oct 08 08:17:50 crc kubenswrapper[4693]: I1008 08:17:50.900946 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c40b5a1f-c5fc-4885-9816-b7b2cfc98423/rabbitmq/0.log" Oct 08 08:17:50 crc kubenswrapper[4693]: I1008 08:17:50.921751 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c40b5a1f-c5fc-4885-9816-b7b2cfc98423/setup-container/0.log" Oct 08 08:17:51 crc kubenswrapper[4693]: I1008 08:17:51.114182 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg_cd25f9bb-a470-4aa4-8afa-6b484fa192c1/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:51 crc kubenswrapper[4693]: I1008 08:17:51.180115 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-jslrn_43bf2dc8-6a52-47ce-978e-9d9fef6ae67c/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:51 crc kubenswrapper[4693]: I1008 08:17:51.390136 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv_2213ef00-9e58-4d62-84f2-026ff39b7127/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:51 crc kubenswrapper[4693]: I1008 08:17:51.525392 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-c2gpk_4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:51 crc kubenswrapper[4693]: I1008 08:17:51.637397 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-dj5jp_ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399/ssh-known-hosts-edpm-deployment/0.log" Oct 08 08:17:51 crc kubenswrapper[4693]: I1008 08:17:51.885405 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6db965f4c9-sszpw_ba5fbd22-39c2-49ae-a74f-ee328cb29a02/proxy-server/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.003936 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6db965f4c9-sszpw_ba5fbd22-39c2-49ae-a74f-ee328cb29a02/proxy-httpd/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.097899 4693 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-m9dkh_7db9c043-f734-4339-8691-8276fc1a459b/swift-ring-rebalance/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.271386 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/account-auditor/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.350931 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/account-reaper/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.510950 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/account-replicator/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.512092 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/account-server/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.548367 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/container-auditor/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.729438 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/container-server/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.740976 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/container-replicator/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.754277 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/container-updater/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.961312 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/object-replicator/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.964935 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/object-expirer/0.log" Oct 08 08:17:52 crc kubenswrapper[4693]: I1008 08:17:52.972673 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/object-auditor/0.log" Oct 08 08:17:53 crc kubenswrapper[4693]: I1008 08:17:53.188623 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/object-updater/0.log" Oct 08 08:17:53 crc kubenswrapper[4693]: I1008 08:17:53.206838 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/object-server/0.log" Oct 08 08:17:53 crc kubenswrapper[4693]: I1008 08:17:53.241955 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/rsync/0.log" Oct 08 08:17:53 crc kubenswrapper[4693]: I1008 08:17:53.342341 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/swift-recon-cron/0.log" Oct 08 08:17:53 crc kubenswrapper[4693]: I1008 08:17:53.522303 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh_be7009a4-69bd-41cc-8fe8-02e5d79db395/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:17:53 crc kubenswrapper[4693]: I1008 08:17:53.705953 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_125016ff-a340-49c8-8c6f-9eed2093e1af/tempest-tests-tempest-tests-runner/0.log" Oct 08 08:17:53 crc kubenswrapper[4693]: I1008 08:17:53.761900 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_fed84ccb-a071-4df4-a6d4-1e5c227a609c/test-operator-logs-container/0.log" Oct 08 08:17:53 crc kubenswrapper[4693]: I1008 08:17:53.975560 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz_4b1ce098-43e7-44eb-8416-806097ba000e/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:18:00 crc kubenswrapper[4693]: I1008 08:18:00.862681 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_225a20e0-eec7-4b8c-89e1-b4a2ebb513a3/memcached/0.log" Oct 08 08:18:01 crc kubenswrapper[4693]: I1008 08:18:01.363373 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:18:01 crc kubenswrapper[4693]: E1008 08:18:01.364371 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:18:14 crc kubenswrapper[4693]: I1008 08:18:14.363750 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:18:14 crc kubenswrapper[4693]: E1008 08:18:14.365144 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:18:29 crc kubenswrapper[4693]: I1008 08:18:29.363661 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:18:29 crc kubenswrapper[4693]: E1008 08:18:29.364550 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:18:41 crc kubenswrapper[4693]: I1008 08:18:41.363392 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:18:41 crc kubenswrapper[4693]: E1008 08:18:41.364391 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:18:45 crc kubenswrapper[4693]: I1008 08:18:45.663993 4693 generic.go:334] "Generic (PLEG): container finished" podID="8e32391b-049b-46ce-beb4-2a08fb1c6817" containerID="76d84a07018901c6349af1c520ed7155f8bdb4b872a22d8a26222a3aec6e45b4" exitCode=0 Oct 08 08:18:45 crc kubenswrapper[4693]: I1008 08:18:45.664091 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nth5x/crc-debug-vnsbh" event={"ID":"8e32391b-049b-46ce-beb4-2a08fb1c6817","Type":"ContainerDied","Data":"76d84a07018901c6349af1c520ed7155f8bdb4b872a22d8a26222a3aec6e45b4"} Oct 08 08:18:46 crc kubenswrapper[4693]: I1008 08:18:46.828294 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nth5x/crc-debug-vnsbh" Oct 08 08:18:46 crc kubenswrapper[4693]: I1008 08:18:46.887712 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nth5x/crc-debug-vnsbh"] Oct 08 08:18:46 crc kubenswrapper[4693]: I1008 08:18:46.897432 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nth5x/crc-debug-vnsbh"] Oct 08 08:18:46 crc kubenswrapper[4693]: I1008 08:18:46.927112 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lg2tt\" (UniqueName: \"kubernetes.io/projected/8e32391b-049b-46ce-beb4-2a08fb1c6817-kube-api-access-lg2tt\") pod \"8e32391b-049b-46ce-beb4-2a08fb1c6817\" (UID: \"8e32391b-049b-46ce-beb4-2a08fb1c6817\") " Oct 08 08:18:46 crc kubenswrapper[4693]: I1008 08:18:46.927240 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e32391b-049b-46ce-beb4-2a08fb1c6817-host\") pod \"8e32391b-049b-46ce-beb4-2a08fb1c6817\" (UID: \"8e32391b-049b-46ce-beb4-2a08fb1c6817\") " Oct 08 08:18:46 crc kubenswrapper[4693]: I1008 08:18:46.927456 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8e32391b-049b-46ce-beb4-2a08fb1c6817-host" (OuterVolumeSpecName: "host") pod "8e32391b-049b-46ce-beb4-2a08fb1c6817" (UID: "8e32391b-049b-46ce-beb4-2a08fb1c6817"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 08:18:46 crc kubenswrapper[4693]: I1008 08:18:46.928176 4693 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e32391b-049b-46ce-beb4-2a08fb1c6817-host\") on node \"crc\" DevicePath \"\"" Oct 08 08:18:46 crc kubenswrapper[4693]: I1008 08:18:46.935945 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e32391b-049b-46ce-beb4-2a08fb1c6817-kube-api-access-lg2tt" (OuterVolumeSpecName: "kube-api-access-lg2tt") pod "8e32391b-049b-46ce-beb4-2a08fb1c6817" (UID: "8e32391b-049b-46ce-beb4-2a08fb1c6817"). InnerVolumeSpecName "kube-api-access-lg2tt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:18:47 crc kubenswrapper[4693]: I1008 08:18:47.029434 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lg2tt\" (UniqueName: \"kubernetes.io/projected/8e32391b-049b-46ce-beb4-2a08fb1c6817-kube-api-access-lg2tt\") on node \"crc\" DevicePath \"\"" Oct 08 08:18:47 crc kubenswrapper[4693]: I1008 08:18:47.379626 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e32391b-049b-46ce-beb4-2a08fb1c6817" path="/var/lib/kubelet/pods/8e32391b-049b-46ce-beb4-2a08fb1c6817/volumes" Oct 08 08:18:47 crc kubenswrapper[4693]: I1008 08:18:47.692348 4693 scope.go:117] "RemoveContainer" containerID="76d84a07018901c6349af1c520ed7155f8bdb4b872a22d8a26222a3aec6e45b4" Oct 08 08:18:47 crc kubenswrapper[4693]: I1008 08:18:47.692427 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nth5x/crc-debug-vnsbh" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.144718 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nth5x/crc-debug-kkzg2"] Oct 08 08:18:48 crc kubenswrapper[4693]: E1008 08:18:48.145280 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e32391b-049b-46ce-beb4-2a08fb1c6817" containerName="container-00" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.145297 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e32391b-049b-46ce-beb4-2a08fb1c6817" containerName="container-00" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.145612 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e32391b-049b-46ce-beb4-2a08fb1c6817" containerName="container-00" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.146566 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nth5x/crc-debug-kkzg2" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.153561 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7de14b17-e329-4d09-a30c-2563b5173032-host\") pod \"crc-debug-kkzg2\" (UID: \"7de14b17-e329-4d09-a30c-2563b5173032\") " pod="openshift-must-gather-nth5x/crc-debug-kkzg2" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.153748 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x2ps\" (UniqueName: \"kubernetes.io/projected/7de14b17-e329-4d09-a30c-2563b5173032-kube-api-access-6x2ps\") pod \"crc-debug-kkzg2\" (UID: \"7de14b17-e329-4d09-a30c-2563b5173032\") " pod="openshift-must-gather-nth5x/crc-debug-kkzg2" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.155705 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-nth5x"/"default-dockercfg-72qn5" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.255180 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7de14b17-e329-4d09-a30c-2563b5173032-host\") pod \"crc-debug-kkzg2\" (UID: \"7de14b17-e329-4d09-a30c-2563b5173032\") " pod="openshift-must-gather-nth5x/crc-debug-kkzg2" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.255252 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x2ps\" (UniqueName: \"kubernetes.io/projected/7de14b17-e329-4d09-a30c-2563b5173032-kube-api-access-6x2ps\") pod \"crc-debug-kkzg2\" (UID: \"7de14b17-e329-4d09-a30c-2563b5173032\") " pod="openshift-must-gather-nth5x/crc-debug-kkzg2" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.255310 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7de14b17-e329-4d09-a30c-2563b5173032-host\") pod \"crc-debug-kkzg2\" (UID: \"7de14b17-e329-4d09-a30c-2563b5173032\") " pod="openshift-must-gather-nth5x/crc-debug-kkzg2" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.281092 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x2ps\" (UniqueName: \"kubernetes.io/projected/7de14b17-e329-4d09-a30c-2563b5173032-kube-api-access-6x2ps\") pod \"crc-debug-kkzg2\" (UID: \"7de14b17-e329-4d09-a30c-2563b5173032\") " pod="openshift-must-gather-nth5x/crc-debug-kkzg2" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.470587 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nth5x/crc-debug-kkzg2" Oct 08 08:18:48 crc kubenswrapper[4693]: I1008 08:18:48.712152 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nth5x/crc-debug-kkzg2" event={"ID":"7de14b17-e329-4d09-a30c-2563b5173032","Type":"ContainerStarted","Data":"0caf0e076f79a52c87ec79b0187bf92d4e53b344623b33f476e9f4c98b4c9f5f"} Oct 08 08:18:49 crc kubenswrapper[4693]: I1008 08:18:49.732614 4693 generic.go:334] "Generic (PLEG): container finished" podID="7de14b17-e329-4d09-a30c-2563b5173032" containerID="15d36d043a23583cb9317b8d9039527c1ef3893625605ac4444543338b531681" exitCode=0 Oct 08 08:18:49 crc kubenswrapper[4693]: I1008 08:18:49.732687 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nth5x/crc-debug-kkzg2" event={"ID":"7de14b17-e329-4d09-a30c-2563b5173032","Type":"ContainerDied","Data":"15d36d043a23583cb9317b8d9039527c1ef3893625605ac4444543338b531681"} Oct 08 08:18:50 crc kubenswrapper[4693]: I1008 08:18:50.822854 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nth5x/crc-debug-kkzg2" Oct 08 08:18:51 crc kubenswrapper[4693]: I1008 08:18:51.007097 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6x2ps\" (UniqueName: \"kubernetes.io/projected/7de14b17-e329-4d09-a30c-2563b5173032-kube-api-access-6x2ps\") pod \"7de14b17-e329-4d09-a30c-2563b5173032\" (UID: \"7de14b17-e329-4d09-a30c-2563b5173032\") " Oct 08 08:18:51 crc kubenswrapper[4693]: I1008 08:18:51.007445 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7de14b17-e329-4d09-a30c-2563b5173032-host\") pod \"7de14b17-e329-4d09-a30c-2563b5173032\" (UID: \"7de14b17-e329-4d09-a30c-2563b5173032\") " Oct 08 08:18:51 crc kubenswrapper[4693]: I1008 08:18:51.007491 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7de14b17-e329-4d09-a30c-2563b5173032-host" (OuterVolumeSpecName: "host") pod "7de14b17-e329-4d09-a30c-2563b5173032" (UID: "7de14b17-e329-4d09-a30c-2563b5173032"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 08:18:51 crc kubenswrapper[4693]: I1008 08:18:51.008237 4693 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7de14b17-e329-4d09-a30c-2563b5173032-host\") on node \"crc\" DevicePath \"\"" Oct 08 08:18:51 crc kubenswrapper[4693]: I1008 08:18:51.026730 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7de14b17-e329-4d09-a30c-2563b5173032-kube-api-access-6x2ps" (OuterVolumeSpecName: "kube-api-access-6x2ps") pod "7de14b17-e329-4d09-a30c-2563b5173032" (UID: "7de14b17-e329-4d09-a30c-2563b5173032"). InnerVolumeSpecName "kube-api-access-6x2ps". 
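Each kubenswrapper message in this log carries a klog-style header after the journald prefix: a severity letter (I, W, E), the date as MMDD, a wall-clock time, the emitting PID, and the source file:line, e.g. I1008 08:18:51.026730 4693 operation_generator.go:803]. A small regexp that extracts those fields from lines like the ones above (illustrative; the pattern is written against this log, not taken from klog itself):

package main

import (
	"fmt"
	"regexp"
)

// Matches headers such as `E1008 08:18:53.363101 4693 pod_workers.go:1301]`:
// severity, MMDD date, time, PID, then source file and line number.
var klogHeader = regexp.MustCompile(
	`([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w./-]+):(\d+)\]`)

func main() {
	line := `Oct 08 08:18:51 crc kubenswrapper[4693]: I1008 08:18:51.026730 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded`
	m := klogHeader.FindStringSubmatch(line)
	fmt.Printf("severity=%s date=%s time=%s pid=%s source=%s:%s\n",
		m[1], m[2], m[3], m[4], m[5], m[6])
}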
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:18:51 crc kubenswrapper[4693]: I1008 08:18:51.110411 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6x2ps\" (UniqueName: \"kubernetes.io/projected/7de14b17-e329-4d09-a30c-2563b5173032-kube-api-access-6x2ps\") on node \"crc\" DevicePath \"\"" Oct 08 08:18:51 crc kubenswrapper[4693]: I1008 08:18:51.752895 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nth5x/crc-debug-kkzg2" event={"ID":"7de14b17-e329-4d09-a30c-2563b5173032","Type":"ContainerDied","Data":"0caf0e076f79a52c87ec79b0187bf92d4e53b344623b33f476e9f4c98b4c9f5f"} Oct 08 08:18:51 crc kubenswrapper[4693]: I1008 08:18:51.753476 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0caf0e076f79a52c87ec79b0187bf92d4e53b344623b33f476e9f4c98b4c9f5f" Oct 08 08:18:51 crc kubenswrapper[4693]: I1008 08:18:51.753344 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nth5x/crc-debug-kkzg2" Oct 08 08:18:53 crc kubenswrapper[4693]: I1008 08:18:53.362384 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:18:53 crc kubenswrapper[4693]: E1008 08:18:53.363101 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:18:56 crc kubenswrapper[4693]: I1008 08:18:56.647025 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nth5x/crc-debug-kkzg2"] Oct 08 08:18:56 crc kubenswrapper[4693]: I1008 08:18:56.655582 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nth5x/crc-debug-kkzg2"] Oct 08 08:18:57 crc kubenswrapper[4693]: I1008 08:18:57.383465 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7de14b17-e329-4d09-a30c-2563b5173032" path="/var/lib/kubelet/pods/7de14b17-e329-4d09-a30c-2563b5173032/volumes" Oct 08 08:18:57 crc kubenswrapper[4693]: I1008 08:18:57.847861 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nth5x/crc-debug-cnb64"] Oct 08 08:18:57 crc kubenswrapper[4693]: E1008 08:18:57.848257 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7de14b17-e329-4d09-a30c-2563b5173032" containerName="container-00" Oct 08 08:18:57 crc kubenswrapper[4693]: I1008 08:18:57.848272 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="7de14b17-e329-4d09-a30c-2563b5173032" containerName="container-00" Oct 08 08:18:57 crc kubenswrapper[4693]: I1008 08:18:57.848468 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="7de14b17-e329-4d09-a30c-2563b5173032" containerName="container-00" Oct 08 08:18:57 crc kubenswrapper[4693]: I1008 08:18:57.849177 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nth5x/crc-debug-cnb64" Oct 08 08:18:57 crc kubenswrapper[4693]: I1008 08:18:57.852041 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-nth5x"/"default-dockercfg-72qn5" Oct 08 08:18:57 crc kubenswrapper[4693]: I1008 08:18:57.935338 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjfdk\" (UniqueName: \"kubernetes.io/projected/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead-kube-api-access-wjfdk\") pod \"crc-debug-cnb64\" (UID: \"7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead\") " pod="openshift-must-gather-nth5x/crc-debug-cnb64" Oct 08 08:18:57 crc kubenswrapper[4693]: I1008 08:18:57.935426 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead-host\") pod \"crc-debug-cnb64\" (UID: \"7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead\") " pod="openshift-must-gather-nth5x/crc-debug-cnb64" Oct 08 08:18:58 crc kubenswrapper[4693]: I1008 08:18:58.037666 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjfdk\" (UniqueName: \"kubernetes.io/projected/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead-kube-api-access-wjfdk\") pod \"crc-debug-cnb64\" (UID: \"7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead\") " pod="openshift-must-gather-nth5x/crc-debug-cnb64" Oct 08 08:18:58 crc kubenswrapper[4693]: I1008 08:18:58.037787 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead-host\") pod \"crc-debug-cnb64\" (UID: \"7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead\") " pod="openshift-must-gather-nth5x/crc-debug-cnb64" Oct 08 08:18:58 crc kubenswrapper[4693]: I1008 08:18:58.038017 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead-host\") pod \"crc-debug-cnb64\" (UID: \"7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead\") " pod="openshift-must-gather-nth5x/crc-debug-cnb64" Oct 08 08:18:58 crc kubenswrapper[4693]: I1008 08:18:58.074743 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjfdk\" (UniqueName: \"kubernetes.io/projected/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead-kube-api-access-wjfdk\") pod \"crc-debug-cnb64\" (UID: \"7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead\") " pod="openshift-must-gather-nth5x/crc-debug-cnb64" Oct 08 08:18:58 crc kubenswrapper[4693]: I1008 08:18:58.177592 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nth5x/crc-debug-cnb64" Oct 08 08:18:58 crc kubenswrapper[4693]: I1008 08:18:58.815598 4693 generic.go:334] "Generic (PLEG): container finished" podID="7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead" containerID="2df2d10a28102ea583424d6fdb3bc06318450698fa1dfb1f6961b049c63dc6b1" exitCode=0 Oct 08 08:18:58 crc kubenswrapper[4693]: I1008 08:18:58.815717 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nth5x/crc-debug-cnb64" event={"ID":"7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead","Type":"ContainerDied","Data":"2df2d10a28102ea583424d6fdb3bc06318450698fa1dfb1f6961b049c63dc6b1"} Oct 08 08:18:58 crc kubenswrapper[4693]: I1008 08:18:58.816256 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nth5x/crc-debug-cnb64" event={"ID":"7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead","Type":"ContainerStarted","Data":"15c5b705f548d699751ba18878a1ffcade3f91ab1e2be9e112bc0b5689074a61"} Oct 08 08:18:58 crc kubenswrapper[4693]: I1008 08:18:58.860575 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nth5x/crc-debug-cnb64"] Oct 08 08:18:58 crc kubenswrapper[4693]: I1008 08:18:58.868196 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nth5x/crc-debug-cnb64"] Oct 08 08:18:59 crc kubenswrapper[4693]: I1008 08:18:59.919532 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nth5x/crc-debug-cnb64" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.074020 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead-host\") pod \"7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead\" (UID: \"7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead\") " Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.074417 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjfdk\" (UniqueName: \"kubernetes.io/projected/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead-kube-api-access-wjfdk\") pod \"7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead\" (UID: \"7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead\") " Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.074512 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead-host" (OuterVolumeSpecName: "host") pod "7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead" (UID: "7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.075011 4693 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead-host\") on node \"crc\" DevicePath \"\"" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.080956 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead-kube-api-access-wjfdk" (OuterVolumeSpecName: "kube-api-access-wjfdk") pod "7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead" (UID: "7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead"). InnerVolumeSpecName "kube-api-access-wjfdk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.177251 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjfdk\" (UniqueName: \"kubernetes.io/projected/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead-kube-api-access-wjfdk\") on node \"crc\" DevicePath \"\"" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.443047 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/util/0.log" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.654944 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/util/0.log" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.656545 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/pull/0.log" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.659766 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/pull/0.log" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.806709 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/extract/0.log" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.844577 4693 scope.go:117] "RemoveContainer" containerID="2df2d10a28102ea583424d6fdb3bc06318450698fa1dfb1f6961b049c63dc6b1" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.844585 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nth5x/crc-debug-cnb64" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.852029 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/util/0.log" Oct 08 08:19:00 crc kubenswrapper[4693]: I1008 08:19:00.853076 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/pull/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.000839 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-58c4cd55f4-5tq42_cd94a973-75b2-4722-a298-16e6bd67aa61/kube-rbac-proxy/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.038785 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7d4d4f8d-d2ztn_b855e40c-e0b0-4322-8099-d4e51c0b92f1/kube-rbac-proxy/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.085305 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-58c4cd55f4-5tq42_cd94a973-75b2-4722-a298-16e6bd67aa61/manager/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.188684 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7d4d4f8d-d2ztn_b855e40c-e0b0-4322-8099-d4e51c0b92f1/manager/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.247743 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-dmgt7_2f8dab68-da73-412a-bf83-95f2ac37f289/kube-rbac-proxy/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.255155 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-dmgt7_2f8dab68-da73-412a-bf83-95f2ac37f289/manager/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.379207 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead" path="/var/lib/kubelet/pods/7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead/volumes" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.397890 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5dc44df7d5-hbqzd_72f3f2ae-ba07-4045-9ac4-fc4f0dee2682/kube-rbac-proxy/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.495824 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5dc44df7d5-hbqzd_72f3f2ae-ba07-4045-9ac4-fc4f0dee2682/manager/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.569290 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54b4974c45-h25pk_81212063-ccc7-423c-b817-60f7280ee4f9/manager/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.573154 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54b4974c45-h25pk_81212063-ccc7-423c-b817-60f7280ee4f9/kube-rbac-proxy/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.661182 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-76d5b87f47-njkdz_bfc23a1a-faab-44e8-91f7-29d4e95f0fdc/kube-rbac-proxy/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.766435 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-76d5b87f47-njkdz_bfc23a1a-faab-44e8-91f7-29d4e95f0fdc/manager/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.846304 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-658588b8c9-6vt47_6fc858ec-6edd-4e45-ba44-fe2ea26a0614/kube-rbac-proxy/0.log" Oct 08 08:19:01 crc kubenswrapper[4693]: I1008 08:19:01.988919 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-658588b8c9-6vt47_6fc858ec-6edd-4e45-ba44-fe2ea26a0614/manager/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.023405 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-649675d675-w96xx_d96bb98a-f416-4d93-b145-37632210f2f8/kube-rbac-proxy/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.050599 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-649675d675-w96xx_d96bb98a-f416-4d93-b145-37632210f2f8/manager/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.158832 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b5ccf6d9c-vqp2x_d26d490a-dba0-46d4-b636-836a4dde53be/kube-rbac-proxy/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.260033 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b5ccf6d9c-vqp2x_d26d490a-dba0-46d4-b636-836a4dde53be/manager/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.334256 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-65d89cfd9f-blmhk_b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f/kube-rbac-proxy/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.366024 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-65d89cfd9f-blmhk_b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f/manager/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.427517 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6cd6d7bdf5-65sjq_6772aabf-b5fa-4fc7-8925-0926ed242e9b/kube-rbac-proxy/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.547272 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6cd6d7bdf5-65sjq_6772aabf-b5fa-4fc7-8925-0926ed242e9b/manager/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.555894 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-8d984cc4d-567tm_3fedcb35-9741-40ee-bdb0-a1d78a5da3e6/kube-rbac-proxy/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.636419 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-8d984cc4d-567tm_3fedcb35-9741-40ee-bdb0-a1d78a5da3e6/manager/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.762876 4693 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7c7fc454ff-lhs5r_cba353e7-9050-4433-a6b6-2ca4f67d077a/kube-rbac-proxy/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.846017 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7c7fc454ff-lhs5r_cba353e7-9050-4433-a6b6-2ca4f67d077a/manager/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.930094 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7468f855d8-4zvpz_422c537a-d341-45ac-ac02-3fb221b66ed4/kube-rbac-proxy/0.log" Oct 08 08:19:02 crc kubenswrapper[4693]: I1008 08:19:02.967285 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7468f855d8-4zvpz_422c537a-d341-45ac-ac02-3fb221b66ed4/manager/0.log" Oct 08 08:19:03 crc kubenswrapper[4693]: I1008 08:19:03.033783 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w_a45ca91b-ddca-4c17-ab8b-d106345451d3/kube-rbac-proxy/0.log" Oct 08 08:19:03 crc kubenswrapper[4693]: I1008 08:19:03.095234 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w_a45ca91b-ddca-4c17-ab8b-d106345451d3/manager/0.log" Oct 08 08:19:03 crc kubenswrapper[4693]: I1008 08:19:03.182792 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7f66b9c549-m8hmw_5bfb052c-4d4a-47df-bb42-25424b56cb92/kube-rbac-proxy/0.log" Oct 08 08:19:03 crc kubenswrapper[4693]: I1008 08:19:03.403929 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-77c8f66d44-864bb_23996d41-f11a-4a8a-8a71-3e7f93978efc/kube-rbac-proxy/0.log" Oct 08 08:19:03 crc kubenswrapper[4693]: I1008 08:19:03.514061 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-77c8f66d44-864bb_23996d41-f11a-4a8a-8a71-3e7f93978efc/operator/0.log" Oct 08 08:19:03 crc kubenswrapper[4693]: I1008 08:19:03.632097 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-qbdg8_875357f9-bf15-47db-83a9-12868aca6f98/registry-server/0.log" Oct 08 08:19:03 crc kubenswrapper[4693]: I1008 08:19:03.783848 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6d8b6f9b9-jtx9z_a8912110-fa72-4e6b-9c38-7b62b34772fa/kube-rbac-proxy/0.log" Oct 08 08:19:03 crc kubenswrapper[4693]: I1008 08:19:03.880748 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6d8b6f9b9-jtx9z_a8912110-fa72-4e6b-9c38-7b62b34772fa/manager/0.log" Oct 08 08:19:03 crc kubenswrapper[4693]: I1008 08:19:03.912984 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-54689d9f88-h2npd_9d40d3d9-711e-461b-b859-684b1af38ee9/kube-rbac-proxy/0.log" Oct 08 08:19:03 crc kubenswrapper[4693]: I1008 08:19:03.980372 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-54689d9f88-h2npd_9d40d3d9-711e-461b-b859-684b1af38ee9/manager/0.log" Oct 08 08:19:04 crc kubenswrapper[4693]: 
I1008 08:19:04.087170 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw_b2a1cce0-35c1-46ed-b375-bb70c8a7c15f/operator/0.log" Oct 08 08:19:04 crc kubenswrapper[4693]: I1008 08:19:04.226913 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-7sw5d_78a7e32f-67dc-454f-b65c-8a8a2605d139/kube-rbac-proxy/0.log" Oct 08 08:19:04 crc kubenswrapper[4693]: I1008 08:19:04.235431 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7f66b9c549-m8hmw_5bfb052c-4d4a-47df-bb42-25424b56cb92/manager/0.log" Oct 08 08:19:04 crc kubenswrapper[4693]: I1008 08:19:04.281879 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-7sw5d_78a7e32f-67dc-454f-b65c-8a8a2605d139/manager/0.log" Oct 08 08:19:04 crc kubenswrapper[4693]: I1008 08:19:04.337024 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d4d74dd89-6h28f_d3a3ae96-9b43-42ab-b688-95e141f326f4/kube-rbac-proxy/0.log" Oct 08 08:19:04 crc kubenswrapper[4693]: I1008 08:19:04.449155 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d4d74dd89-6h28f_d3a3ae96-9b43-42ab-b688-95e141f326f4/manager/0.log" Oct 08 08:19:04 crc kubenswrapper[4693]: I1008 08:19:04.464936 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-9bz2k_f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68/kube-rbac-proxy/0.log" Oct 08 08:19:04 crc kubenswrapper[4693]: I1008 08:19:04.486112 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-9bz2k_f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68/manager/0.log" Oct 08 08:19:04 crc kubenswrapper[4693]: I1008 08:19:04.615310 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6cbc6dd547-h74m5_09c1a297-4a54-430a-a78e-134db76611b9/manager/0.log" Oct 08 08:19:04 crc kubenswrapper[4693]: I1008 08:19:04.621225 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6cbc6dd547-h74m5_09c1a297-4a54-430a-a78e-134db76611b9/kube-rbac-proxy/0.log" Oct 08 08:19:07 crc kubenswrapper[4693]: I1008 08:19:07.363538 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:19:07 crc kubenswrapper[4693]: E1008 08:19:07.364036 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:19:21 crc kubenswrapper[4693]: I1008 08:19:21.260972 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-9t8ns_c2d8737c-16dd-429e-a6e0-3d2c35877083/control-plane-machine-set-operator/0.log" Oct 08 08:19:21 crc kubenswrapper[4693]: I1008 08:19:21.363057 4693 scope.go:117] 
"RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:19:21 crc kubenswrapper[4693]: E1008 08:19:21.363543 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:19:21 crc kubenswrapper[4693]: I1008 08:19:21.482642 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dh66b_b303a16a-8059-4d90-91ac-2ba5c953f346/kube-rbac-proxy/0.log" Oct 08 08:19:21 crc kubenswrapper[4693]: I1008 08:19:21.496847 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dh66b_b303a16a-8059-4d90-91ac-2ba5c953f346/machine-api-operator/0.log" Oct 08 08:19:34 crc kubenswrapper[4693]: I1008 08:19:34.362906 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:19:34 crc kubenswrapper[4693]: E1008 08:19:34.363730 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:19:35 crc kubenswrapper[4693]: I1008 08:19:35.615265 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-69p7f_c4e1c031-3b17-4c19-80f7-f37b55c3cb4a/cert-manager-controller/0.log" Oct 08 08:19:35 crc kubenswrapper[4693]: I1008 08:19:35.788165 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-55lln_3e9c884b-9e83-4f39-b92c-c278a1a08a2a/cert-manager-cainjector/0.log" Oct 08 08:19:35 crc kubenswrapper[4693]: I1008 08:19:35.844731 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-c4c8h_8d52a50f-bc7f-4317-a82a-678905b53fcc/cert-manager-webhook/0.log" Oct 08 08:19:47 crc kubenswrapper[4693]: I1008 08:19:47.363526 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:19:47 crc kubenswrapper[4693]: E1008 08:19:47.364609 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:19:48 crc kubenswrapper[4693]: I1008 08:19:48.707705 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-j658k_5ad24461-fc5f-44fd-94e2-68b8ef30e152/nmstate-console-plugin/0.log" Oct 08 08:19:48 crc kubenswrapper[4693]: I1008 08:19:48.873755 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-handler-j9lkx_a28c017b-9170-4749-80cf-60b85681a4e7/nmstate-handler/0.log" Oct 08 08:19:48 crc kubenswrapper[4693]: I1008 08:19:48.875706 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-945gn_ae5b67a9-cb0d-4f73-8353-2bba4708a176/kube-rbac-proxy/0.log" Oct 08 08:19:48 crc kubenswrapper[4693]: I1008 08:19:48.926353 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-945gn_ae5b67a9-cb0d-4f73-8353-2bba4708a176/nmstate-metrics/0.log" Oct 08 08:19:49 crc kubenswrapper[4693]: I1008 08:19:49.064495 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-48qbg_55f815ee-eb64-4f69-b192-081c71664f3b/nmstate-operator/0.log" Oct 08 08:19:49 crc kubenswrapper[4693]: I1008 08:19:49.104050 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-fpzdq_69abf05d-a12d-4255-a1cf-a57efdc57a93/nmstate-webhook/0.log" Oct 08 08:19:58 crc kubenswrapper[4693]: I1008 08:19:58.363939 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:19:58 crc kubenswrapper[4693]: E1008 08:19:58.365218 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.005122 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-nj6r5_99056ede-9949-4966-a265-fc3af4134013/kube-rbac-proxy/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.098921 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-nj6r5_99056ede-9949-4966-a265-fc3af4134013/controller/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.158284 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-frr-files/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.345853 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-reloader/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.374344 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-frr-files/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.379882 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-metrics/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.415206 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-reloader/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.599303 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-metrics/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.602162 4693 log.go:25] 
"Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-reloader/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.611659 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-metrics/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.616434 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-frr-files/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.767336 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-frr-files/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.781158 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-metrics/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.801261 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/controller/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.836487 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-reloader/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.936880 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/frr-metrics/0.log" Oct 08 08:20:04 crc kubenswrapper[4693]: I1008 08:20:04.956582 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/kube-rbac-proxy/0.log" Oct 08 08:20:05 crc kubenswrapper[4693]: I1008 08:20:05.021285 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/kube-rbac-proxy-frr/0.log" Oct 08 08:20:05 crc kubenswrapper[4693]: I1008 08:20:05.177577 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/reloader/0.log" Oct 08 08:20:05 crc kubenswrapper[4693]: I1008 08:20:05.249952 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-4857d_b17ddd86-5c6e-4898-a859-ce43f604fc10/frr-k8s-webhook-server/0.log" Oct 08 08:20:05 crc kubenswrapper[4693]: I1008 08:20:05.443484 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6b6d7649c4-6krt4_20b2cb2b-9d01-44fa-a40e-2375df3a92d7/manager/0.log" Oct 08 08:20:05 crc kubenswrapper[4693]: I1008 08:20:05.593701 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6559c8fcd-4bbqs_9db7ae35-d57f-4342-8a8c-ff3613e28905/webhook-server/0.log" Oct 08 08:20:05 crc kubenswrapper[4693]: I1008 08:20:05.664713 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-tjkwg_88513bc0-a703-458f-a001-d6a636023c45/kube-rbac-proxy/0.log" Oct 08 08:20:06 crc kubenswrapper[4693]: I1008 08:20:06.165508 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-tjkwg_88513bc0-a703-458f-a001-d6a636023c45/speaker/0.log" Oct 08 08:20:06 crc kubenswrapper[4693]: I1008 08:20:06.366775 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/frr/0.log" Oct 08 08:20:11 crc kubenswrapper[4693]: I1008 08:20:11.362693 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:20:11 crc kubenswrapper[4693]: E1008 08:20:11.364286 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:20:19 crc kubenswrapper[4693]: I1008 08:20:19.559241 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/util/0.log" Oct 08 08:20:19 crc kubenswrapper[4693]: I1008 08:20:19.794941 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/pull/0.log" Oct 08 08:20:19 crc kubenswrapper[4693]: I1008 08:20:19.797276 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/util/0.log" Oct 08 08:20:19 crc kubenswrapper[4693]: I1008 08:20:19.875547 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/pull/0.log" Oct 08 08:20:20 crc kubenswrapper[4693]: I1008 08:20:20.016054 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/util/0.log" Oct 08 08:20:20 crc kubenswrapper[4693]: I1008 08:20:20.038888 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/pull/0.log" Oct 08 08:20:20 crc kubenswrapper[4693]: I1008 08:20:20.071196 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/extract/0.log" Oct 08 08:20:20 crc kubenswrapper[4693]: I1008 08:20:20.196343 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/extract-utilities/0.log" Oct 08 08:20:20 crc kubenswrapper[4693]: I1008 08:20:20.394145 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/extract-content/0.log" Oct 08 08:20:20 crc kubenswrapper[4693]: I1008 08:20:20.429802 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/extract-content/0.log" Oct 08 08:20:20 crc kubenswrapper[4693]: I1008 08:20:20.460290 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/extract-utilities/0.log" Oct 
08 08:20:20 crc kubenswrapper[4693]: I1008 08:20:20.596763 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/extract-content/0.log" Oct 08 08:20:20 crc kubenswrapper[4693]: I1008 08:20:20.637149 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/extract-utilities/0.log" Oct 08 08:20:20 crc kubenswrapper[4693]: I1008 08:20:20.978381 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/registry-server/0.log" Oct 08 08:20:21 crc kubenswrapper[4693]: I1008 08:20:21.011077 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/extract-utilities/0.log" Oct 08 08:20:21 crc kubenswrapper[4693]: I1008 08:20:21.164509 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/extract-content/0.log" Oct 08 08:20:21 crc kubenswrapper[4693]: I1008 08:20:21.166450 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/extract-utilities/0.log" Oct 08 08:20:21 crc kubenswrapper[4693]: I1008 08:20:21.178774 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/extract-content/0.log" Oct 08 08:20:21 crc kubenswrapper[4693]: I1008 08:20:21.374225 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/extract-utilities/0.log" Oct 08 08:20:21 crc kubenswrapper[4693]: I1008 08:20:21.383311 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/extract-content/0.log" Oct 08 08:20:21 crc kubenswrapper[4693]: I1008 08:20:21.651049 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/util/0.log" Oct 08 08:20:21 crc kubenswrapper[4693]: I1008 08:20:21.808353 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/registry-server/0.log" Oct 08 08:20:21 crc kubenswrapper[4693]: I1008 08:20:21.847995 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/pull/0.log" Oct 08 08:20:21 crc kubenswrapper[4693]: I1008 08:20:21.879877 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/pull/0.log" Oct 08 08:20:21 crc kubenswrapper[4693]: I1008 08:20:21.882256 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/util/0.log" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.026809 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/pull/0.log" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.039963 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/util/0.log" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.092326 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/extract/0.log" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.246940 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-nkdmr_9c82bb62-a293-463f-ba14-c6fcf26e3a90/marketplace-operator/0.log" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.342353 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/extract-utilities/0.log" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.363329 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:20:22 crc kubenswrapper[4693]: E1008 08:20:22.363636 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.470433 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/extract-content/0.log" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.511146 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/extract-utilities/0.log" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.550032 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/extract-content/0.log" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.674406 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/extract-content/0.log" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.675714 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/extract-utilities/0.log" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.800953 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/registry-server/0.log" Oct 08 08:20:22 crc kubenswrapper[4693]: I1008 08:20:22.840489 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/extract-utilities/0.log" Oct 08 08:20:23 crc kubenswrapper[4693]: I1008 08:20:23.079525 4693 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/extract-content/0.log" Oct 08 08:20:23 crc kubenswrapper[4693]: I1008 08:20:23.089057 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/extract-utilities/0.log" Oct 08 08:20:23 crc kubenswrapper[4693]: I1008 08:20:23.119632 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/extract-content/0.log" Oct 08 08:20:23 crc kubenswrapper[4693]: I1008 08:20:23.277686 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/extract-utilities/0.log" Oct 08 08:20:23 crc kubenswrapper[4693]: I1008 08:20:23.317436 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/extract-content/0.log" Oct 08 08:20:23 crc kubenswrapper[4693]: I1008 08:20:23.725330 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/registry-server/0.log" Oct 08 08:20:34 crc kubenswrapper[4693]: I1008 08:20:34.363210 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:20:34 crc kubenswrapper[4693]: E1008 08:20:34.364168 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:20:46 crc kubenswrapper[4693]: I1008 08:20:46.362776 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:20:46 crc kubenswrapper[4693]: E1008 08:20:46.363801 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:20:57 crc kubenswrapper[4693]: I1008 08:20:57.364612 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:20:57 crc kubenswrapper[4693]: I1008 08:20:57.984055 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"f7ef691dbf7078b04c5820d01b35339f572409288d9df3c27d07056f58129fe4"} Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.043533 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5qwmr"] Oct 08 08:21:25 crc kubenswrapper[4693]: E1008 08:21:25.044568 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead" 
containerName="container-00" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.044585 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead" containerName="container-00" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.044844 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fd4ebc6-1af4-4888-be70-1f5e8f3a5ead" containerName="container-00" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.046940 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.065510 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5qwmr"] Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.198013 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/472c4362-1f2b-4b5e-b300-23955dfb86c8-utilities\") pod \"redhat-marketplace-5qwmr\" (UID: \"472c4362-1f2b-4b5e-b300-23955dfb86c8\") " pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.198640 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzm9z\" (UniqueName: \"kubernetes.io/projected/472c4362-1f2b-4b5e-b300-23955dfb86c8-kube-api-access-wzm9z\") pod \"redhat-marketplace-5qwmr\" (UID: \"472c4362-1f2b-4b5e-b300-23955dfb86c8\") " pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.198714 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/472c4362-1f2b-4b5e-b300-23955dfb86c8-catalog-content\") pod \"redhat-marketplace-5qwmr\" (UID: \"472c4362-1f2b-4b5e-b300-23955dfb86c8\") " pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.300503 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzm9z\" (UniqueName: \"kubernetes.io/projected/472c4362-1f2b-4b5e-b300-23955dfb86c8-kube-api-access-wzm9z\") pod \"redhat-marketplace-5qwmr\" (UID: \"472c4362-1f2b-4b5e-b300-23955dfb86c8\") " pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.300554 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/472c4362-1f2b-4b5e-b300-23955dfb86c8-catalog-content\") pod \"redhat-marketplace-5qwmr\" (UID: \"472c4362-1f2b-4b5e-b300-23955dfb86c8\") " pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.300607 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/472c4362-1f2b-4b5e-b300-23955dfb86c8-utilities\") pod \"redhat-marketplace-5qwmr\" (UID: \"472c4362-1f2b-4b5e-b300-23955dfb86c8\") " pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.301172 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/472c4362-1f2b-4b5e-b300-23955dfb86c8-catalog-content\") pod \"redhat-marketplace-5qwmr\" (UID: \"472c4362-1f2b-4b5e-b300-23955dfb86c8\") " 
pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.301240 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/472c4362-1f2b-4b5e-b300-23955dfb86c8-utilities\") pod \"redhat-marketplace-5qwmr\" (UID: \"472c4362-1f2b-4b5e-b300-23955dfb86c8\") " pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.326846 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzm9z\" (UniqueName: \"kubernetes.io/projected/472c4362-1f2b-4b5e-b300-23955dfb86c8-kube-api-access-wzm9z\") pod \"redhat-marketplace-5qwmr\" (UID: \"472c4362-1f2b-4b5e-b300-23955dfb86c8\") " pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.379287 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:25 crc kubenswrapper[4693]: I1008 08:21:25.836961 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5qwmr"] Oct 08 08:21:26 crc kubenswrapper[4693]: I1008 08:21:26.318316 4693 generic.go:334] "Generic (PLEG): container finished" podID="472c4362-1f2b-4b5e-b300-23955dfb86c8" containerID="5b6ddb22b9beaa47c11bcdc7bee98eb32fbfeca6fc7026f78fbf39a4698212ab" exitCode=0 Oct 08 08:21:26 crc kubenswrapper[4693]: I1008 08:21:26.318404 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qwmr" event={"ID":"472c4362-1f2b-4b5e-b300-23955dfb86c8","Type":"ContainerDied","Data":"5b6ddb22b9beaa47c11bcdc7bee98eb32fbfeca6fc7026f78fbf39a4698212ab"} Oct 08 08:21:26 crc kubenswrapper[4693]: I1008 08:21:26.318740 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qwmr" event={"ID":"472c4362-1f2b-4b5e-b300-23955dfb86c8","Type":"ContainerStarted","Data":"bda53d799644bfc8c7fa19dc79a00d44359f46dde0407e091b0bcd3d8b44d622"} Oct 08 08:21:26 crc kubenswrapper[4693]: I1008 08:21:26.323644 4693 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 08 08:21:27 crc kubenswrapper[4693]: I1008 08:21:27.334438 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qwmr" event={"ID":"472c4362-1f2b-4b5e-b300-23955dfb86c8","Type":"ContainerStarted","Data":"249f786e7e9c422a60894fb1a3375c76942156bddbb7077bf7fb8e70979fe15a"} Oct 08 08:21:28 crc kubenswrapper[4693]: I1008 08:21:28.350448 4693 generic.go:334] "Generic (PLEG): container finished" podID="472c4362-1f2b-4b5e-b300-23955dfb86c8" containerID="249f786e7e9c422a60894fb1a3375c76942156bddbb7077bf7fb8e70979fe15a" exitCode=0 Oct 08 08:21:28 crc kubenswrapper[4693]: I1008 08:21:28.350541 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qwmr" event={"ID":"472c4362-1f2b-4b5e-b300-23955dfb86c8","Type":"ContainerDied","Data":"249f786e7e9c422a60894fb1a3375c76942156bddbb7077bf7fb8e70979fe15a"} Oct 08 08:21:30 crc kubenswrapper[4693]: I1008 08:21:30.375770 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qwmr" event={"ID":"472c4362-1f2b-4b5e-b300-23955dfb86c8","Type":"ContainerStarted","Data":"8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac"} Oct 08 08:21:30 crc kubenswrapper[4693]: I1008 08:21:30.421576 4693 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5qwmr" podStartSLOduration=2.577604442 podStartE2EDuration="5.421558901s" podCreationTimestamp="2025-10-08 08:21:25 +0000 UTC" firstStartedPulling="2025-10-08 08:21:26.323242095 +0000 UTC m=+3871.694207060" lastFinishedPulling="2025-10-08 08:21:29.167196584 +0000 UTC m=+3874.538161519" observedRunningTime="2025-10-08 08:21:30.42036088 +0000 UTC m=+3875.791325825" watchObservedRunningTime="2025-10-08 08:21:30.421558901 +0000 UTC m=+3875.792523836" Oct 08 08:21:35 crc kubenswrapper[4693]: I1008 08:21:35.380157 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:35 crc kubenswrapper[4693]: I1008 08:21:35.380862 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:35 crc kubenswrapper[4693]: I1008 08:21:35.449446 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:35 crc kubenswrapper[4693]: I1008 08:21:35.510293 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:35 crc kubenswrapper[4693]: I1008 08:21:35.684555 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5qwmr"] Oct 08 08:21:37 crc kubenswrapper[4693]: I1008 08:21:37.464227 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5qwmr" podUID="472c4362-1f2b-4b5e-b300-23955dfb86c8" containerName="registry-server" containerID="cri-o://8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac" gracePeriod=2 Oct 08 08:21:37 crc kubenswrapper[4693]: I1008 08:21:37.977022 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.016726 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/472c4362-1f2b-4b5e-b300-23955dfb86c8-catalog-content\") pod \"472c4362-1f2b-4b5e-b300-23955dfb86c8\" (UID: \"472c4362-1f2b-4b5e-b300-23955dfb86c8\") " Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.017140 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzm9z\" (UniqueName: \"kubernetes.io/projected/472c4362-1f2b-4b5e-b300-23955dfb86c8-kube-api-access-wzm9z\") pod \"472c4362-1f2b-4b5e-b300-23955dfb86c8\" (UID: \"472c4362-1f2b-4b5e-b300-23955dfb86c8\") " Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.017471 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/472c4362-1f2b-4b5e-b300-23955dfb86c8-utilities\") pod \"472c4362-1f2b-4b5e-b300-23955dfb86c8\" (UID: \"472c4362-1f2b-4b5e-b300-23955dfb86c8\") " Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.019075 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/472c4362-1f2b-4b5e-b300-23955dfb86c8-utilities" (OuterVolumeSpecName: "utilities") pod "472c4362-1f2b-4b5e-b300-23955dfb86c8" (UID: "472c4362-1f2b-4b5e-b300-23955dfb86c8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.034392 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/472c4362-1f2b-4b5e-b300-23955dfb86c8-kube-api-access-wzm9z" (OuterVolumeSpecName: "kube-api-access-wzm9z") pod "472c4362-1f2b-4b5e-b300-23955dfb86c8" (UID: "472c4362-1f2b-4b5e-b300-23955dfb86c8"). InnerVolumeSpecName "kube-api-access-wzm9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.048993 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/472c4362-1f2b-4b5e-b300-23955dfb86c8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "472c4362-1f2b-4b5e-b300-23955dfb86c8" (UID: "472c4362-1f2b-4b5e-b300-23955dfb86c8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.119932 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzm9z\" (UniqueName: \"kubernetes.io/projected/472c4362-1f2b-4b5e-b300-23955dfb86c8-kube-api-access-wzm9z\") on node \"crc\" DevicePath \"\"" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.119990 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/472c4362-1f2b-4b5e-b300-23955dfb86c8-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.120007 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/472c4362-1f2b-4b5e-b300-23955dfb86c8-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.479489 4693 generic.go:334] "Generic (PLEG): container finished" podID="472c4362-1f2b-4b5e-b300-23955dfb86c8" containerID="8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac" exitCode=0 Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.481337 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qwmr" event={"ID":"472c4362-1f2b-4b5e-b300-23955dfb86c8","Type":"ContainerDied","Data":"8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac"} Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.481528 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qwmr" event={"ID":"472c4362-1f2b-4b5e-b300-23955dfb86c8","Type":"ContainerDied","Data":"bda53d799644bfc8c7fa19dc79a00d44359f46dde0407e091b0bcd3d8b44d622"} Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.481677 4693 scope.go:117] "RemoveContainer" containerID="8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.482028 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5qwmr" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.544238 4693 scope.go:117] "RemoveContainer" containerID="249f786e7e9c422a60894fb1a3375c76942156bddbb7077bf7fb8e70979fe15a" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.554451 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5qwmr"] Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.569033 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5qwmr"] Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.570072 4693 scope.go:117] "RemoveContainer" containerID="5b6ddb22b9beaa47c11bcdc7bee98eb32fbfeca6fc7026f78fbf39a4698212ab" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.637394 4693 scope.go:117] "RemoveContainer" containerID="8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac" Oct 08 08:21:38 crc kubenswrapper[4693]: E1008 08:21:38.638041 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac\": container with ID starting with 8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac not found: ID does not exist" containerID="8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.638140 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac"} err="failed to get container status \"8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac\": rpc error: code = NotFound desc = could not find container \"8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac\": container with ID starting with 8477984927a170b495abc00acb5b636045c8392acd181608088cf5f21c3689ac not found: ID does not exist" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.638196 4693 scope.go:117] "RemoveContainer" containerID="249f786e7e9c422a60894fb1a3375c76942156bddbb7077bf7fb8e70979fe15a" Oct 08 08:21:38 crc kubenswrapper[4693]: E1008 08:21:38.638962 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"249f786e7e9c422a60894fb1a3375c76942156bddbb7077bf7fb8e70979fe15a\": container with ID starting with 249f786e7e9c422a60894fb1a3375c76942156bddbb7077bf7fb8e70979fe15a not found: ID does not exist" containerID="249f786e7e9c422a60894fb1a3375c76942156bddbb7077bf7fb8e70979fe15a" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.639028 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"249f786e7e9c422a60894fb1a3375c76942156bddbb7077bf7fb8e70979fe15a"} err="failed to get container status \"249f786e7e9c422a60894fb1a3375c76942156bddbb7077bf7fb8e70979fe15a\": rpc error: code = NotFound desc = could not find container \"249f786e7e9c422a60894fb1a3375c76942156bddbb7077bf7fb8e70979fe15a\": container with ID starting with 249f786e7e9c422a60894fb1a3375c76942156bddbb7077bf7fb8e70979fe15a not found: ID does not exist" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.639069 4693 scope.go:117] "RemoveContainer" containerID="5b6ddb22b9beaa47c11bcdc7bee98eb32fbfeca6fc7026f78fbf39a4698212ab" Oct 08 08:21:38 crc kubenswrapper[4693]: E1008 08:21:38.639617 4693 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5b6ddb22b9beaa47c11bcdc7bee98eb32fbfeca6fc7026f78fbf39a4698212ab\": container with ID starting with 5b6ddb22b9beaa47c11bcdc7bee98eb32fbfeca6fc7026f78fbf39a4698212ab not found: ID does not exist" containerID="5b6ddb22b9beaa47c11bcdc7bee98eb32fbfeca6fc7026f78fbf39a4698212ab" Oct 08 08:21:38 crc kubenswrapper[4693]: I1008 08:21:38.639655 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b6ddb22b9beaa47c11bcdc7bee98eb32fbfeca6fc7026f78fbf39a4698212ab"} err="failed to get container status \"5b6ddb22b9beaa47c11bcdc7bee98eb32fbfeca6fc7026f78fbf39a4698212ab\": rpc error: code = NotFound desc = could not find container \"5b6ddb22b9beaa47c11bcdc7bee98eb32fbfeca6fc7026f78fbf39a4698212ab\": container with ID starting with 5b6ddb22b9beaa47c11bcdc7bee98eb32fbfeca6fc7026f78fbf39a4698212ab not found: ID does not exist" Oct 08 08:21:39 crc kubenswrapper[4693]: I1008 08:21:39.394661 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="472c4362-1f2b-4b5e-b300-23955dfb86c8" path="/var/lib/kubelet/pods/472c4362-1f2b-4b5e-b300-23955dfb86c8/volumes" Oct 08 08:22:14 crc kubenswrapper[4693]: I1008 08:22:14.922148 4693 generic.go:334] "Generic (PLEG): container finished" podID="ac648e9b-4eb4-4f0c-a108-f103f385b3f8" containerID="80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4" exitCode=0 Oct 08 08:22:14 crc kubenswrapper[4693]: I1008 08:22:14.922231 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nth5x/must-gather-7r9gb" event={"ID":"ac648e9b-4eb4-4f0c-a108-f103f385b3f8","Type":"ContainerDied","Data":"80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4"} Oct 08 08:22:14 crc kubenswrapper[4693]: I1008 08:22:14.923356 4693 scope.go:117] "RemoveContainer" containerID="80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4" Oct 08 08:22:15 crc kubenswrapper[4693]: I1008 08:22:15.438455 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nth5x_must-gather-7r9gb_ac648e9b-4eb4-4f0c-a108-f103f385b3f8/gather/0.log" Oct 08 08:22:23 crc kubenswrapper[4693]: I1008 08:22:23.916964 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nth5x/must-gather-7r9gb"] Oct 08 08:22:23 crc kubenswrapper[4693]: I1008 08:22:23.917653 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-nth5x/must-gather-7r9gb" podUID="ac648e9b-4eb4-4f0c-a108-f103f385b3f8" containerName="copy" containerID="cri-o://2fd2c11d873009e88ac755b29de042f07bec9d09dd6c299751f7f07787ffd4fd" gracePeriod=2 Oct 08 08:22:23 crc kubenswrapper[4693]: I1008 08:22:23.927650 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nth5x/must-gather-7r9gb"] Oct 08 08:22:24 crc kubenswrapper[4693]: I1008 08:22:24.402763 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nth5x_must-gather-7r9gb_ac648e9b-4eb4-4f0c-a108-f103f385b3f8/copy/0.log" Oct 08 08:22:24 crc kubenswrapper[4693]: I1008 08:22:24.403425 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nth5x/must-gather-7r9gb" Oct 08 08:22:24 crc kubenswrapper[4693]: I1008 08:22:24.555257 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8brlx\" (UniqueName: \"kubernetes.io/projected/ac648e9b-4eb4-4f0c-a108-f103f385b3f8-kube-api-access-8brlx\") pod \"ac648e9b-4eb4-4f0c-a108-f103f385b3f8\" (UID: \"ac648e9b-4eb4-4f0c-a108-f103f385b3f8\") " Oct 08 08:22:24 crc kubenswrapper[4693]: I1008 08:22:24.555369 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ac648e9b-4eb4-4f0c-a108-f103f385b3f8-must-gather-output\") pod \"ac648e9b-4eb4-4f0c-a108-f103f385b3f8\" (UID: \"ac648e9b-4eb4-4f0c-a108-f103f385b3f8\") " Oct 08 08:22:24 crc kubenswrapper[4693]: I1008 08:22:24.576077 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac648e9b-4eb4-4f0c-a108-f103f385b3f8-kube-api-access-8brlx" (OuterVolumeSpecName: "kube-api-access-8brlx") pod "ac648e9b-4eb4-4f0c-a108-f103f385b3f8" (UID: "ac648e9b-4eb4-4f0c-a108-f103f385b3f8"). InnerVolumeSpecName "kube-api-access-8brlx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:22:24 crc kubenswrapper[4693]: I1008 08:22:24.657476 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8brlx\" (UniqueName: \"kubernetes.io/projected/ac648e9b-4eb4-4f0c-a108-f103f385b3f8-kube-api-access-8brlx\") on node \"crc\" DevicePath \"\"" Oct 08 08:22:24 crc kubenswrapper[4693]: I1008 08:22:24.708310 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac648e9b-4eb4-4f0c-a108-f103f385b3f8-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "ac648e9b-4eb4-4f0c-a108-f103f385b3f8" (UID: "ac648e9b-4eb4-4f0c-a108-f103f385b3f8"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:22:24 crc kubenswrapper[4693]: I1008 08:22:24.759341 4693 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ac648e9b-4eb4-4f0c-a108-f103f385b3f8-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 08 08:22:25 crc kubenswrapper[4693]: I1008 08:22:25.042661 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nth5x_must-gather-7r9gb_ac648e9b-4eb4-4f0c-a108-f103f385b3f8/copy/0.log" Oct 08 08:22:25 crc kubenswrapper[4693]: I1008 08:22:25.043016 4693 generic.go:334] "Generic (PLEG): container finished" podID="ac648e9b-4eb4-4f0c-a108-f103f385b3f8" containerID="2fd2c11d873009e88ac755b29de042f07bec9d09dd6c299751f7f07787ffd4fd" exitCode=143 Oct 08 08:22:25 crc kubenswrapper[4693]: I1008 08:22:25.043060 4693 scope.go:117] "RemoveContainer" containerID="2fd2c11d873009e88ac755b29de042f07bec9d09dd6c299751f7f07787ffd4fd" Oct 08 08:22:25 crc kubenswrapper[4693]: I1008 08:22:25.043201 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nth5x/must-gather-7r9gb" Oct 08 08:22:25 crc kubenswrapper[4693]: I1008 08:22:25.072288 4693 scope.go:117] "RemoveContainer" containerID="80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4" Oct 08 08:22:25 crc kubenswrapper[4693]: I1008 08:22:25.148196 4693 scope.go:117] "RemoveContainer" containerID="2fd2c11d873009e88ac755b29de042f07bec9d09dd6c299751f7f07787ffd4fd" Oct 08 08:22:25 crc kubenswrapper[4693]: E1008 08:22:25.148614 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fd2c11d873009e88ac755b29de042f07bec9d09dd6c299751f7f07787ffd4fd\": container with ID starting with 2fd2c11d873009e88ac755b29de042f07bec9d09dd6c299751f7f07787ffd4fd not found: ID does not exist" containerID="2fd2c11d873009e88ac755b29de042f07bec9d09dd6c299751f7f07787ffd4fd" Oct 08 08:22:25 crc kubenswrapper[4693]: I1008 08:22:25.148657 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fd2c11d873009e88ac755b29de042f07bec9d09dd6c299751f7f07787ffd4fd"} err="failed to get container status \"2fd2c11d873009e88ac755b29de042f07bec9d09dd6c299751f7f07787ffd4fd\": rpc error: code = NotFound desc = could not find container \"2fd2c11d873009e88ac755b29de042f07bec9d09dd6c299751f7f07787ffd4fd\": container with ID starting with 2fd2c11d873009e88ac755b29de042f07bec9d09dd6c299751f7f07787ffd4fd not found: ID does not exist" Oct 08 08:22:25 crc kubenswrapper[4693]: I1008 08:22:25.148682 4693 scope.go:117] "RemoveContainer" containerID="80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4" Oct 08 08:22:25 crc kubenswrapper[4693]: E1008 08:22:25.149315 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4\": container with ID starting with 80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4 not found: ID does not exist" containerID="80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4" Oct 08 08:22:25 crc kubenswrapper[4693]: I1008 08:22:25.149348 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4"} err="failed to get container status \"80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4\": rpc error: code = NotFound desc = could not find container \"80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4\": container with ID starting with 80c673d6a679f8844fe081b68ff0615be5c3fefe853aee848e86346889a3acc4 not found: ID does not exist" Oct 08 08:22:25 crc kubenswrapper[4693]: I1008 08:22:25.383730 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac648e9b-4eb4-4f0c-a108-f103f385b3f8" path="/var/lib/kubelet/pods/ac648e9b-4eb4-4f0c-a108-f103f385b3f8/volumes" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.101368 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rtcj7/must-gather-jd4b4"] Oct 08 08:23:00 crc kubenswrapper[4693]: E1008 08:23:00.102398 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac648e9b-4eb4-4f0c-a108-f103f385b3f8" containerName="gather" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.102414 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac648e9b-4eb4-4f0c-a108-f103f385b3f8" containerName="gather" Oct 08 08:23:00 crc 
kubenswrapper[4693]: E1008 08:23:00.102443 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="472c4362-1f2b-4b5e-b300-23955dfb86c8" containerName="extract-utilities" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.102451 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="472c4362-1f2b-4b5e-b300-23955dfb86c8" containerName="extract-utilities" Oct 08 08:23:00 crc kubenswrapper[4693]: E1008 08:23:00.102482 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="472c4362-1f2b-4b5e-b300-23955dfb86c8" containerName="extract-content" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.102490 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="472c4362-1f2b-4b5e-b300-23955dfb86c8" containerName="extract-content" Oct 08 08:23:00 crc kubenswrapper[4693]: E1008 08:23:00.102518 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac648e9b-4eb4-4f0c-a108-f103f385b3f8" containerName="copy" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.102525 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac648e9b-4eb4-4f0c-a108-f103f385b3f8" containerName="copy" Oct 08 08:23:00 crc kubenswrapper[4693]: E1008 08:23:00.102538 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="472c4362-1f2b-4b5e-b300-23955dfb86c8" containerName="registry-server" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.102545 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="472c4362-1f2b-4b5e-b300-23955dfb86c8" containerName="registry-server" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.102753 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="472c4362-1f2b-4b5e-b300-23955dfb86c8" containerName="registry-server" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.102771 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac648e9b-4eb4-4f0c-a108-f103f385b3f8" containerName="copy" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.102794 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac648e9b-4eb4-4f0c-a108-f103f385b3f8" containerName="gather" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.104499 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rtcj7/must-gather-jd4b4" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.115798 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-rtcj7"/"openshift-service-ca.crt" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.116147 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-rtcj7"/"kube-root-ca.crt" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.123313 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-rtcj7/must-gather-jd4b4"] Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.198478 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f8598397-df5e-4a4b-ac22-4348239ab87f-must-gather-output\") pod \"must-gather-jd4b4\" (UID: \"f8598397-df5e-4a4b-ac22-4348239ab87f\") " pod="openshift-must-gather-rtcj7/must-gather-jd4b4" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.198550 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82dh2\" (UniqueName: \"kubernetes.io/projected/f8598397-df5e-4a4b-ac22-4348239ab87f-kube-api-access-82dh2\") pod \"must-gather-jd4b4\" (UID: \"f8598397-df5e-4a4b-ac22-4348239ab87f\") " pod="openshift-must-gather-rtcj7/must-gather-jd4b4" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.300357 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f8598397-df5e-4a4b-ac22-4348239ab87f-must-gather-output\") pod \"must-gather-jd4b4\" (UID: \"f8598397-df5e-4a4b-ac22-4348239ab87f\") " pod="openshift-must-gather-rtcj7/must-gather-jd4b4" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.300414 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82dh2\" (UniqueName: \"kubernetes.io/projected/f8598397-df5e-4a4b-ac22-4348239ab87f-kube-api-access-82dh2\") pod \"must-gather-jd4b4\" (UID: \"f8598397-df5e-4a4b-ac22-4348239ab87f\") " pod="openshift-must-gather-rtcj7/must-gather-jd4b4" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.300899 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f8598397-df5e-4a4b-ac22-4348239ab87f-must-gather-output\") pod \"must-gather-jd4b4\" (UID: \"f8598397-df5e-4a4b-ac22-4348239ab87f\") " pod="openshift-must-gather-rtcj7/must-gather-jd4b4" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.321610 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82dh2\" (UniqueName: \"kubernetes.io/projected/f8598397-df5e-4a4b-ac22-4348239ab87f-kube-api-access-82dh2\") pod \"must-gather-jd4b4\" (UID: \"f8598397-df5e-4a4b-ac22-4348239ab87f\") " pod="openshift-must-gather-rtcj7/must-gather-jd4b4" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.472833 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rtcj7/must-gather-jd4b4" Oct 08 08:23:00 crc kubenswrapper[4693]: I1008 08:23:00.990138 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-rtcj7/must-gather-jd4b4"] Oct 08 08:23:01 crc kubenswrapper[4693]: I1008 08:23:01.482696 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/must-gather-jd4b4" event={"ID":"f8598397-df5e-4a4b-ac22-4348239ab87f","Type":"ContainerStarted","Data":"e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4"} Oct 08 08:23:01 crc kubenswrapper[4693]: I1008 08:23:01.483003 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/must-gather-jd4b4" event={"ID":"f8598397-df5e-4a4b-ac22-4348239ab87f","Type":"ContainerStarted","Data":"f22859f4e44b93c384c2c34cac4f08867e01f4d1310c9d95a953335f6160b430"} Oct 08 08:23:02 crc kubenswrapper[4693]: I1008 08:23:02.493341 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/must-gather-jd4b4" event={"ID":"f8598397-df5e-4a4b-ac22-4348239ab87f","Type":"ContainerStarted","Data":"27dd57459d447c99202c691db833fcf89e50d3dc7d31c67ef6fa13e91ed1263b"} Oct 08 08:23:02 crc kubenswrapper[4693]: I1008 08:23:02.516267 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-rtcj7/must-gather-jd4b4" podStartSLOduration=2.516249735 podStartE2EDuration="2.516249735s" podCreationTimestamp="2025-10-08 08:23:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 08:23:02.509285622 +0000 UTC m=+3967.880250567" watchObservedRunningTime="2025-10-08 08:23:02.516249735 +0000 UTC m=+3967.887214670" Oct 08 08:23:04 crc kubenswrapper[4693]: I1008 08:23:04.990948 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rtcj7/crc-debug-hw2jv"] Oct 08 08:23:04 crc kubenswrapper[4693]: I1008 08:23:04.992902 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" Oct 08 08:23:04 crc kubenswrapper[4693]: I1008 08:23:04.995188 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-rtcj7"/"default-dockercfg-rqpq8" Oct 08 08:23:05 crc kubenswrapper[4693]: I1008 08:23:05.003390 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46be0fcc-a651-4a31-94a5-e2cdb1eb4775-host\") pod \"crc-debug-hw2jv\" (UID: \"46be0fcc-a651-4a31-94a5-e2cdb1eb4775\") " pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" Oct 08 08:23:05 crc kubenswrapper[4693]: I1008 08:23:05.003522 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7mdn\" (UniqueName: \"kubernetes.io/projected/46be0fcc-a651-4a31-94a5-e2cdb1eb4775-kube-api-access-f7mdn\") pod \"crc-debug-hw2jv\" (UID: \"46be0fcc-a651-4a31-94a5-e2cdb1eb4775\") " pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" Oct 08 08:23:05 crc kubenswrapper[4693]: I1008 08:23:05.104573 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46be0fcc-a651-4a31-94a5-e2cdb1eb4775-host\") pod \"crc-debug-hw2jv\" (UID: \"46be0fcc-a651-4a31-94a5-e2cdb1eb4775\") " pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" Oct 08 08:23:05 crc kubenswrapper[4693]: I1008 08:23:05.104673 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7mdn\" (UniqueName: \"kubernetes.io/projected/46be0fcc-a651-4a31-94a5-e2cdb1eb4775-kube-api-access-f7mdn\") pod \"crc-debug-hw2jv\" (UID: \"46be0fcc-a651-4a31-94a5-e2cdb1eb4775\") " pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" Oct 08 08:23:05 crc kubenswrapper[4693]: I1008 08:23:05.104777 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46be0fcc-a651-4a31-94a5-e2cdb1eb4775-host\") pod \"crc-debug-hw2jv\" (UID: \"46be0fcc-a651-4a31-94a5-e2cdb1eb4775\") " pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" Oct 08 08:23:05 crc kubenswrapper[4693]: I1008 08:23:05.124925 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7mdn\" (UniqueName: \"kubernetes.io/projected/46be0fcc-a651-4a31-94a5-e2cdb1eb4775-kube-api-access-f7mdn\") pod \"crc-debug-hw2jv\" (UID: \"46be0fcc-a651-4a31-94a5-e2cdb1eb4775\") " pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" Oct 08 08:23:05 crc kubenswrapper[4693]: I1008 08:23:05.313596 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" Oct 08 08:23:05 crc kubenswrapper[4693]: I1008 08:23:05.519644 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" event={"ID":"46be0fcc-a651-4a31-94a5-e2cdb1eb4775","Type":"ContainerStarted","Data":"23b5b7b5d1875d70acc78831324a1915e52719b46eb9c249c7168bdcfa0a5dc1"} Oct 08 08:23:06 crc kubenswrapper[4693]: I1008 08:23:06.535526 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" event={"ID":"46be0fcc-a651-4a31-94a5-e2cdb1eb4775","Type":"ContainerStarted","Data":"cd1f35455fb74e7192b764270b78c63ae546eb112bcca03e3809bbc63f84033a"} Oct 08 08:23:06 crc kubenswrapper[4693]: I1008 08:23:06.574910 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" podStartSLOduration=2.5748831 podStartE2EDuration="2.5748831s" podCreationTimestamp="2025-10-08 08:23:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 08:23:06.562986858 +0000 UTC m=+3971.933951793" watchObservedRunningTime="2025-10-08 08:23:06.5748831 +0000 UTC m=+3971.945848035" Oct 08 08:23:23 crc kubenswrapper[4693]: I1008 08:23:23.490005 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:23:23 crc kubenswrapper[4693]: I1008 08:23:23.491370 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:23:31 crc kubenswrapper[4693]: I1008 08:23:31.915392 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tnk4z"] Oct 08 08:23:31 crc kubenswrapper[4693]: I1008 08:23:31.921105 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:31 crc kubenswrapper[4693]: I1008 08:23:31.926905 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tnk4z"] Oct 08 08:23:32 crc kubenswrapper[4693]: I1008 08:23:32.020446 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlgnm\" (UniqueName: \"kubernetes.io/projected/ce099887-fefd-439c-87a7-de8a4e7c6862-kube-api-access-mlgnm\") pod \"redhat-operators-tnk4z\" (UID: \"ce099887-fefd-439c-87a7-de8a4e7c6862\") " pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:32 crc kubenswrapper[4693]: I1008 08:23:32.020592 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce099887-fefd-439c-87a7-de8a4e7c6862-utilities\") pod \"redhat-operators-tnk4z\" (UID: \"ce099887-fefd-439c-87a7-de8a4e7c6862\") " pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:32 crc kubenswrapper[4693]: I1008 08:23:32.020642 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce099887-fefd-439c-87a7-de8a4e7c6862-catalog-content\") pod \"redhat-operators-tnk4z\" (UID: \"ce099887-fefd-439c-87a7-de8a4e7c6862\") " pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:32 crc kubenswrapper[4693]: I1008 08:23:32.122946 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce099887-fefd-439c-87a7-de8a4e7c6862-utilities\") pod \"redhat-operators-tnk4z\" (UID: \"ce099887-fefd-439c-87a7-de8a4e7c6862\") " pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:32 crc kubenswrapper[4693]: I1008 08:23:32.123031 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce099887-fefd-439c-87a7-de8a4e7c6862-catalog-content\") pod \"redhat-operators-tnk4z\" (UID: \"ce099887-fefd-439c-87a7-de8a4e7c6862\") " pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:32 crc kubenswrapper[4693]: I1008 08:23:32.123062 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlgnm\" (UniqueName: \"kubernetes.io/projected/ce099887-fefd-439c-87a7-de8a4e7c6862-kube-api-access-mlgnm\") pod \"redhat-operators-tnk4z\" (UID: \"ce099887-fefd-439c-87a7-de8a4e7c6862\") " pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:32 crc kubenswrapper[4693]: I1008 08:23:32.124431 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce099887-fefd-439c-87a7-de8a4e7c6862-utilities\") pod \"redhat-operators-tnk4z\" (UID: \"ce099887-fefd-439c-87a7-de8a4e7c6862\") " pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:32 crc kubenswrapper[4693]: I1008 08:23:32.124464 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce099887-fefd-439c-87a7-de8a4e7c6862-catalog-content\") pod \"redhat-operators-tnk4z\" (UID: \"ce099887-fefd-439c-87a7-de8a4e7c6862\") " pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:32 crc kubenswrapper[4693]: I1008 08:23:32.140504 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-mlgnm\" (UniqueName: \"kubernetes.io/projected/ce099887-fefd-439c-87a7-de8a4e7c6862-kube-api-access-mlgnm\") pod \"redhat-operators-tnk4z\" (UID: \"ce099887-fefd-439c-87a7-de8a4e7c6862\") " pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:32 crc kubenswrapper[4693]: I1008 08:23:32.258017 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:32 crc kubenswrapper[4693]: I1008 08:23:32.741178 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tnk4z"] Oct 08 08:23:32 crc kubenswrapper[4693]: I1008 08:23:32.763232 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk4z" event={"ID":"ce099887-fefd-439c-87a7-de8a4e7c6862","Type":"ContainerStarted","Data":"91da254ec4cfc693d84158aa40fb02f02948e33dce82cb2353accb9821e5cc44"} Oct 08 08:23:33 crc kubenswrapper[4693]: I1008 08:23:33.795365 4693 generic.go:334] "Generic (PLEG): container finished" podID="ce099887-fefd-439c-87a7-de8a4e7c6862" containerID="81534fc62e9ef4d0b63a11022c34b611140a8be86a2a5f0bdb7f489ae97fd26b" exitCode=0 Oct 08 08:23:33 crc kubenswrapper[4693]: I1008 08:23:33.795852 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk4z" event={"ID":"ce099887-fefd-439c-87a7-de8a4e7c6862","Type":"ContainerDied","Data":"81534fc62e9ef4d0b63a11022c34b611140a8be86a2a5f0bdb7f489ae97fd26b"} Oct 08 08:23:34 crc kubenswrapper[4693]: I1008 08:23:34.808272 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk4z" event={"ID":"ce099887-fefd-439c-87a7-de8a4e7c6862","Type":"ContainerStarted","Data":"398cc24d4a6f7b9865a7207d904361c4c2690f7c3b975516659f077ccb173530"} Oct 08 08:23:35 crc kubenswrapper[4693]: I1008 08:23:35.818946 4693 generic.go:334] "Generic (PLEG): container finished" podID="ce099887-fefd-439c-87a7-de8a4e7c6862" containerID="398cc24d4a6f7b9865a7207d904361c4c2690f7c3b975516659f077ccb173530" exitCode=0 Oct 08 08:23:35 crc kubenswrapper[4693]: I1008 08:23:35.818996 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk4z" event={"ID":"ce099887-fefd-439c-87a7-de8a4e7c6862","Type":"ContainerDied","Data":"398cc24d4a6f7b9865a7207d904361c4c2690f7c3b975516659f077ccb173530"} Oct 08 08:23:36 crc kubenswrapper[4693]: I1008 08:23:36.832177 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk4z" event={"ID":"ce099887-fefd-439c-87a7-de8a4e7c6862","Type":"ContainerStarted","Data":"387a56b4ba3b114d7288e6d439497b48c1b26df84ec5975414760f7a435c10b0"} Oct 08 08:23:36 crc kubenswrapper[4693]: I1008 08:23:36.861379 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tnk4z" podStartSLOduration=3.279028933 podStartE2EDuration="5.861362259s" podCreationTimestamp="2025-10-08 08:23:31 +0000 UTC" firstStartedPulling="2025-10-08 08:23:33.798168828 +0000 UTC m=+3999.169133763" lastFinishedPulling="2025-10-08 08:23:36.380502154 +0000 UTC m=+4001.751467089" observedRunningTime="2025-10-08 08:23:36.850781731 +0000 UTC m=+4002.221746666" watchObservedRunningTime="2025-10-08 08:23:36.861362259 +0000 UTC m=+4002.232327194" Oct 08 08:23:42 crc kubenswrapper[4693]: I1008 08:23:42.258186 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 
08:23:42 crc kubenswrapper[4693]: I1008 08:23:42.258628 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:42 crc kubenswrapper[4693]: I1008 08:23:42.310941 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:42 crc kubenswrapper[4693]: I1008 08:23:42.955359 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:43 crc kubenswrapper[4693]: I1008 08:23:43.008073 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tnk4z"] Oct 08 08:23:44 crc kubenswrapper[4693]: I1008 08:23:44.915648 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tnk4z" podUID="ce099887-fefd-439c-87a7-de8a4e7c6862" containerName="registry-server" containerID="cri-o://387a56b4ba3b114d7288e6d439497b48c1b26df84ec5975414760f7a435c10b0" gracePeriod=2 Oct 08 08:23:45 crc kubenswrapper[4693]: I1008 08:23:45.932452 4693 generic.go:334] "Generic (PLEG): container finished" podID="ce099887-fefd-439c-87a7-de8a4e7c6862" containerID="387a56b4ba3b114d7288e6d439497b48c1b26df84ec5975414760f7a435c10b0" exitCode=0 Oct 08 08:23:45 crc kubenswrapper[4693]: I1008 08:23:45.932500 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk4z" event={"ID":"ce099887-fefd-439c-87a7-de8a4e7c6862","Type":"ContainerDied","Data":"387a56b4ba3b114d7288e6d439497b48c1b26df84ec5975414760f7a435c10b0"} Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.128371 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.214931 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlgnm\" (UniqueName: \"kubernetes.io/projected/ce099887-fefd-439c-87a7-de8a4e7c6862-kube-api-access-mlgnm\") pod \"ce099887-fefd-439c-87a7-de8a4e7c6862\" (UID: \"ce099887-fefd-439c-87a7-de8a4e7c6862\") " Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.215037 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce099887-fefd-439c-87a7-de8a4e7c6862-utilities\") pod \"ce099887-fefd-439c-87a7-de8a4e7c6862\" (UID: \"ce099887-fefd-439c-87a7-de8a4e7c6862\") " Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.215170 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce099887-fefd-439c-87a7-de8a4e7c6862-catalog-content\") pod \"ce099887-fefd-439c-87a7-de8a4e7c6862\" (UID: \"ce099887-fefd-439c-87a7-de8a4e7c6862\") " Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.216189 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce099887-fefd-439c-87a7-de8a4e7c6862-utilities" (OuterVolumeSpecName: "utilities") pod "ce099887-fefd-439c-87a7-de8a4e7c6862" (UID: "ce099887-fefd-439c-87a7-de8a4e7c6862"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.227171 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce099887-fefd-439c-87a7-de8a4e7c6862-kube-api-access-mlgnm" (OuterVolumeSpecName: "kube-api-access-mlgnm") pod "ce099887-fefd-439c-87a7-de8a4e7c6862" (UID: "ce099887-fefd-439c-87a7-de8a4e7c6862"). InnerVolumeSpecName "kube-api-access-mlgnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.307835 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce099887-fefd-439c-87a7-de8a4e7c6862-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce099887-fefd-439c-87a7-de8a4e7c6862" (UID: "ce099887-fefd-439c-87a7-de8a4e7c6862"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.317631 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce099887-fefd-439c-87a7-de8a4e7c6862-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.317676 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce099887-fefd-439c-87a7-de8a4e7c6862-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.317692 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlgnm\" (UniqueName: \"kubernetes.io/projected/ce099887-fefd-439c-87a7-de8a4e7c6862-kube-api-access-mlgnm\") on node \"crc\" DevicePath \"\"" Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.949117 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk4z" event={"ID":"ce099887-fefd-439c-87a7-de8a4e7c6862","Type":"ContainerDied","Data":"91da254ec4cfc693d84158aa40fb02f02948e33dce82cb2353accb9821e5cc44"} Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.949196 4693 scope.go:117] "RemoveContainer" containerID="387a56b4ba3b114d7288e6d439497b48c1b26df84ec5975414760f7a435c10b0" Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.949328 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tnk4z" Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.977636 4693 scope.go:117] "RemoveContainer" containerID="398cc24d4a6f7b9865a7207d904361c4c2690f7c3b975516659f077ccb173530" Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.987550 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tnk4z"] Oct 08 08:23:46 crc kubenswrapper[4693]: I1008 08:23:46.997753 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tnk4z"] Oct 08 08:23:47 crc kubenswrapper[4693]: I1008 08:23:47.020445 4693 scope.go:117] "RemoveContainer" containerID="81534fc62e9ef4d0b63a11022c34b611140a8be86a2a5f0bdb7f489ae97fd26b" Oct 08 08:23:47 crc kubenswrapper[4693]: I1008 08:23:47.377414 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce099887-fefd-439c-87a7-de8a4e7c6862" path="/var/lib/kubelet/pods/ce099887-fefd-439c-87a7-de8a4e7c6862/volumes" Oct 08 08:23:53 crc kubenswrapper[4693]: I1008 08:23:53.489552 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:23:53 crc kubenswrapper[4693]: I1008 08:23:53.491306 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:24:10 crc kubenswrapper[4693]: I1008 08:24:10.016283 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-78f9f7b5dd-wdpgb_bae6b42f-5a3c-4568-b8db-84be1514827e/barbican-api/0.log" Oct 08 08:24:10 crc kubenswrapper[4693]: I1008 08:24:10.018430 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-78f9f7b5dd-wdpgb_bae6b42f-5a3c-4568-b8db-84be1514827e/barbican-api-log/0.log" Oct 08 08:24:10 crc kubenswrapper[4693]: I1008 08:24:10.214622 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7d588c7bd-f7dbq_f3bebe35-d072-4368-ba55-d8415a4f44ef/barbican-keystone-listener/0.log" Oct 08 08:24:10 crc kubenswrapper[4693]: I1008 08:24:10.253269 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7d588c7bd-f7dbq_f3bebe35-d072-4368-ba55-d8415a4f44ef/barbican-keystone-listener-log/0.log" Oct 08 08:24:10 crc kubenswrapper[4693]: I1008 08:24:10.419648 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-69584f958c-5wpzz_f74783b1-e062-4c4f-82eb-a7df2387913d/barbican-worker/0.log" Oct 08 08:24:10 crc kubenswrapper[4693]: I1008 08:24:10.469762 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-69584f958c-5wpzz_f74783b1-e062-4c4f-82eb-a7df2387913d/barbican-worker-log/0.log" Oct 08 08:24:10 crc kubenswrapper[4693]: I1008 08:24:10.678551 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-jcxpz_ac6cb698-ba08-46e2-a8ae-557f656d3209/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:10 crc kubenswrapper[4693]: I1008 08:24:10.879953 4693 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_68fc1107-d02a-4138-bd74-648778e9302d/ceilometer-central-agent/0.log" Oct 08 08:24:10 crc kubenswrapper[4693]: I1008 08:24:10.919030 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_68fc1107-d02a-4138-bd74-648778e9302d/ceilometer-notification-agent/0.log" Oct 08 08:24:10 crc kubenswrapper[4693]: I1008 08:24:10.924515 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_68fc1107-d02a-4138-bd74-648778e9302d/proxy-httpd/0.log" Oct 08 08:24:11 crc kubenswrapper[4693]: I1008 08:24:11.047955 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_68fc1107-d02a-4138-bd74-648778e9302d/sg-core/0.log" Oct 08 08:24:11 crc kubenswrapper[4693]: I1008 08:24:11.179475 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ebd0e852-8dca-49c3-9af2-00f4d652216e/cinder-api/0.log" Oct 08 08:24:11 crc kubenswrapper[4693]: I1008 08:24:11.278727 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ebd0e852-8dca-49c3-9af2-00f4d652216e/cinder-api-log/0.log" Oct 08 08:24:11 crc kubenswrapper[4693]: I1008 08:24:11.407846 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_3b12dbfa-195c-43ea-ae2b-267a8733add4/cinder-scheduler/0.log" Oct 08 08:24:11 crc kubenswrapper[4693]: I1008 08:24:11.510022 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_3b12dbfa-195c-43ea-ae2b-267a8733add4/probe/0.log" Oct 08 08:24:11 crc kubenswrapper[4693]: I1008 08:24:11.591608 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-727b5_2faa8ca0-e93e-4532-bf6c-00f2064bf177/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:11 crc kubenswrapper[4693]: I1008 08:24:11.778863 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-rnxkd_eecc70b9-2687-499e-89e1-f2346e8088f6/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:11 crc kubenswrapper[4693]: I1008 08:24:11.941405 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-xf42b_06bebec7-3818-42dc-b357-7ef2ea40a463/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:12 crc kubenswrapper[4693]: I1008 08:24:12.154332 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-2nfsx_77ca5c51-5f17-4793-897e-235a54c041c2/init/0.log" Oct 08 08:24:12 crc kubenswrapper[4693]: I1008 08:24:12.286965 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-2nfsx_77ca5c51-5f17-4793-897e-235a54c041c2/init/0.log" Oct 08 08:24:12 crc kubenswrapper[4693]: I1008 08:24:12.361580 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-2nfsx_77ca5c51-5f17-4793-897e-235a54c041c2/dnsmasq-dns/0.log" Oct 08 08:24:12 crc kubenswrapper[4693]: I1008 08:24:12.520946 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-brvlv_a7a301e7-dfc4-47d6-acf1-f34b19e1e13a/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:12 crc kubenswrapper[4693]: I1008 08:24:12.605872 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_06c30f6d-189d-4e3e-98f3-156a7784963c/glance-httpd/0.log" Oct 08 08:24:12 crc kubenswrapper[4693]: I1008 08:24:12.698897 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_06c30f6d-189d-4e3e-98f3-156a7784963c/glance-log/0.log" Oct 08 08:24:12 crc kubenswrapper[4693]: I1008 08:24:12.858417 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_d34c5891-320c-402e-9ee6-0f75ba7e2bbb/glance-httpd/0.log" Oct 08 08:24:12 crc kubenswrapper[4693]: I1008 08:24:12.931559 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_d34c5891-320c-402e-9ee6-0f75ba7e2bbb/glance-log/0.log" Oct 08 08:24:13 crc kubenswrapper[4693]: I1008 08:24:13.131623 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-786b4cdb4-z6p8n_1f26734d-12eb-4c6c-9e68-254a30cea3b6/horizon/0.log" Oct 08 08:24:13 crc kubenswrapper[4693]: I1008 08:24:13.185836 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-4fxbp_36d7c0e2-4414-4f5d-ace2-37e627b6e330/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:13 crc kubenswrapper[4693]: I1008 08:24:13.408118 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-786b4cdb4-z6p8n_1f26734d-12eb-4c6c-9e68-254a30cea3b6/horizon-log/0.log" Oct 08 08:24:13 crc kubenswrapper[4693]: I1008 08:24:13.441501 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-8r5hs_e8c4dd95-f13a-4479-99a8-9ea12766ac48/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:13 crc kubenswrapper[4693]: I1008 08:24:13.648292 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29331841-z6dhr_0e12eea6-ac5f-47c8-810b-b304ee039431/keystone-cron/0.log" Oct 08 08:24:13 crc kubenswrapper[4693]: I1008 08:24:13.675289 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5bbc7cbf94-5tkqs_e4dc4d60-5d83-4f09-986c-a394c44788b5/keystone-api/0.log" Oct 08 08:24:13 crc kubenswrapper[4693]: I1008 08:24:13.807833 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_028998c5-3dec-46de-a5bb-bc5855df099e/kube-state-metrics/0.log" Oct 08 08:24:13 crc kubenswrapper[4693]: I1008 08:24:13.859650 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-q8wtk_014202b7-db23-455e-ba57-d12d3b6e2975/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:14 crc kubenswrapper[4693]: I1008 08:24:14.152950 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-77bfdd5769-m42ll_e8c282e4-9865-41ec-922f-86d322b60ea0/neutron-api/0.log" Oct 08 08:24:14 crc kubenswrapper[4693]: I1008 08:24:14.229283 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-77bfdd5769-m42ll_e8c282e4-9865-41ec-922f-86d322b60ea0/neutron-httpd/0.log" Oct 08 08:24:14 crc kubenswrapper[4693]: I1008 08:24:14.412284 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-68qtc_0d3eae26-e892-4687-bd4c-4cbd1a566e56/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:14 crc kubenswrapper[4693]: I1008 08:24:14.959984 4693 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_nova-api-0_106dc2ce-316f-4e4e-a87c-ada5021fea4b/nova-api-log/0.log" Oct 08 08:24:15 crc kubenswrapper[4693]: I1008 08:24:15.136525 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_bf23439a-4d07-4711-9190-3fce06bdf2e4/nova-cell0-conductor-conductor/0.log" Oct 08 08:24:15 crc kubenswrapper[4693]: I1008 08:24:15.369766 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_106dc2ce-316f-4e4e-a87c-ada5021fea4b/nova-api-api/0.log" Oct 08 08:24:15 crc kubenswrapper[4693]: I1008 08:24:15.839339 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_2b15b479-b3e9-4af4-bb60-3f6ca0ed053e/nova-cell1-conductor-conductor/0.log" Oct 08 08:24:16 crc kubenswrapper[4693]: I1008 08:24:16.010935 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_532a7d37-716c-43b9-b417-8f9ab3ed3dcf/nova-cell1-novncproxy-novncproxy/0.log" Oct 08 08:24:16 crc kubenswrapper[4693]: I1008 08:24:16.097087 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-n87pd_14d12cdc-edb4-47c1-b245-b95cb21067bd/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:16 crc kubenswrapper[4693]: I1008 08:24:16.288396 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_22aa81a6-83fc-4751-aa3b-c77361db77c0/nova-metadata-log/0.log" Oct 08 08:24:16 crc kubenswrapper[4693]: I1008 08:24:16.767314 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_4fcb903f-8f89-47a5-b120-a3e8daaaa2ae/nova-scheduler-scheduler/0.log" Oct 08 08:24:16 crc kubenswrapper[4693]: I1008 08:24:16.811557 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c4cf2123-362f-4d9f-8080-bd9d6e13de17/mysql-bootstrap/0.log" Oct 08 08:24:17 crc kubenswrapper[4693]: I1008 08:24:17.336898 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c4cf2123-362f-4d9f-8080-bd9d6e13de17/mysql-bootstrap/0.log" Oct 08 08:24:17 crc kubenswrapper[4693]: I1008 08:24:17.345854 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c4cf2123-362f-4d9f-8080-bd9d6e13de17/galera/0.log" Oct 08 08:24:17 crc kubenswrapper[4693]: I1008 08:24:17.577593 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_1aa3187a-5fce-4486-a846-709a6231383f/mysql-bootstrap/0.log" Oct 08 08:24:17 crc kubenswrapper[4693]: I1008 08:24:17.691204 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_22aa81a6-83fc-4751-aa3b-c77361db77c0/nova-metadata-metadata/0.log" Oct 08 08:24:17 crc kubenswrapper[4693]: I1008 08:24:17.759281 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_1aa3187a-5fce-4486-a846-709a6231383f/galera/0.log" Oct 08 08:24:17 crc kubenswrapper[4693]: I1008 08:24:17.759899 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_1aa3187a-5fce-4486-a846-709a6231383f/mysql-bootstrap/0.log" Oct 08 08:24:17 crc kubenswrapper[4693]: I1008 08:24:17.919348 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_de8f5998-6e3d-4695-affe-f3afab2d2528/openstackclient/0.log" Oct 08 08:24:18 crc kubenswrapper[4693]: I1008 08:24:18.122311 4693 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ovn-controller-metrics-47xpx_dd6332ac-70b3-4137-9419-3d394f270aa3/openstack-network-exporter/0.log" Oct 08 08:24:18 crc kubenswrapper[4693]: I1008 08:24:18.237209 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5mdkq_391f1e55-ea6a-4d2f-ae2a-08adfad94698/ovsdb-server-init/0.log" Oct 08 08:24:18 crc kubenswrapper[4693]: I1008 08:24:18.434435 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5mdkq_391f1e55-ea6a-4d2f-ae2a-08adfad94698/ovs-vswitchd/0.log" Oct 08 08:24:18 crc kubenswrapper[4693]: I1008 08:24:18.468500 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5mdkq_391f1e55-ea6a-4d2f-ae2a-08adfad94698/ovsdb-server/0.log" Oct 08 08:24:18 crc kubenswrapper[4693]: I1008 08:24:18.493290 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5mdkq_391f1e55-ea6a-4d2f-ae2a-08adfad94698/ovsdb-server-init/0.log" Oct 08 08:24:18 crc kubenswrapper[4693]: I1008 08:24:18.683557 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-qmltj_5ecbe7f3-77ea-42ad-a8d0-02bfba59f33d/ovn-controller/0.log" Oct 08 08:24:18 crc kubenswrapper[4693]: I1008 08:24:18.899924 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-ncmdw_2715cea9-fa27-469b-988a-338c5b80f62d/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:18 crc kubenswrapper[4693]: I1008 08:24:18.940297 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e01f071e-63e7-4a6e-b321-5f489621b814/openstack-network-exporter/0.log" Oct 08 08:24:19 crc kubenswrapper[4693]: I1008 08:24:19.108344 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e01f071e-63e7-4a6e-b321-5f489621b814/ovn-northd/0.log" Oct 08 08:24:19 crc kubenswrapper[4693]: I1008 08:24:19.289533 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_02c7bbbf-7730-4f24-b131-92411b14dcb0/openstack-network-exporter/0.log" Oct 08 08:24:19 crc kubenswrapper[4693]: I1008 08:24:19.418860 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_02c7bbbf-7730-4f24-b131-92411b14dcb0/ovsdbserver-nb/0.log" Oct 08 08:24:19 crc kubenswrapper[4693]: I1008 08:24:19.474442 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5d809602-04d2-4d1f-b024-30fecd9b2256/openstack-network-exporter/0.log" Oct 08 08:24:19 crc kubenswrapper[4693]: I1008 08:24:19.648455 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5d809602-04d2-4d1f-b024-30fecd9b2256/ovsdbserver-sb/0.log" Oct 08 08:24:19 crc kubenswrapper[4693]: I1008 08:24:19.794166 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-84d7c4f8cb-75jz5_7285e65f-f435-4b74-8019-c5acad9b74c7/placement-api/0.log" Oct 08 08:24:19 crc kubenswrapper[4693]: I1008 08:24:19.905978 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-84d7c4f8cb-75jz5_7285e65f-f435-4b74-8019-c5acad9b74c7/placement-log/0.log" Oct 08 08:24:19 crc kubenswrapper[4693]: I1008 08:24:19.977936 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a/setup-container/0.log" Oct 08 08:24:20 crc kubenswrapper[4693]: I1008 08:24:20.238894 4693 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a/setup-container/0.log" Oct 08 08:24:20 crc kubenswrapper[4693]: I1008 08:24:20.293437 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1cb1ac5f-3a31-47eb-8ed2-99d0934d2e8a/rabbitmq/0.log" Oct 08 08:24:20 crc kubenswrapper[4693]: I1008 08:24:20.424462 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c40b5a1f-c5fc-4885-9816-b7b2cfc98423/setup-container/0.log" Oct 08 08:24:20 crc kubenswrapper[4693]: I1008 08:24:20.611609 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c40b5a1f-c5fc-4885-9816-b7b2cfc98423/setup-container/0.log" Oct 08 08:24:20 crc kubenswrapper[4693]: I1008 08:24:20.633300 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c40b5a1f-c5fc-4885-9816-b7b2cfc98423/rabbitmq/0.log" Oct 08 08:24:20 crc kubenswrapper[4693]: I1008 08:24:20.781157 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-vd7qg_cd25f9bb-a470-4aa4-8afa-6b484fa192c1/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:20 crc kubenswrapper[4693]: I1008 08:24:20.950014 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-jslrn_43bf2dc8-6a52-47ce-978e-9d9fef6ae67c/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:21 crc kubenswrapper[4693]: I1008 08:24:21.036750 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-h8dnv_2213ef00-9e58-4d62-84f2-026ff39b7127/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:21 crc kubenswrapper[4693]: I1008 08:24:21.264641 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-c2gpk_4c0a2d61-5fe0-4adb-91fc-0d1f27273ca4/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:21 crc kubenswrapper[4693]: I1008 08:24:21.370749 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-dj5jp_ce57c0aa-4ff0-4cd0-88e2-4b3d2d9b4399/ssh-known-hosts-edpm-deployment/0.log" Oct 08 08:24:21 crc kubenswrapper[4693]: I1008 08:24:21.549675 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6db965f4c9-sszpw_ba5fbd22-39c2-49ae-a74f-ee328cb29a02/proxy-server/0.log" Oct 08 08:24:21 crc kubenswrapper[4693]: I1008 08:24:21.604833 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6db965f4c9-sszpw_ba5fbd22-39c2-49ae-a74f-ee328cb29a02/proxy-httpd/0.log" Oct 08 08:24:21 crc kubenswrapper[4693]: I1008 08:24:21.707168 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-m9dkh_7db9c043-f734-4339-8691-8276fc1a459b/swift-ring-rebalance/0.log" Oct 08 08:24:21 crc kubenswrapper[4693]: I1008 08:24:21.965377 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/account-auditor/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.006585 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/account-reaper/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.145035 4693 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/account-replicator/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.198268 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/account-server/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.212151 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/container-auditor/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.408348 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/container-updater/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.443442 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/container-server/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.460619 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/container-replicator/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.598565 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/object-expirer/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.606582 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/object-auditor/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.709903 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/object-replicator/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.788564 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/object-server/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.835742 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/object-updater/0.log" Oct 08 08:24:22 crc kubenswrapper[4693]: I1008 08:24:22.946145 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/rsync/0.log" Oct 08 08:24:23 crc kubenswrapper[4693]: I1008 08:24:23.055229 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_298a15e0-992f-4f83-8067-7e8e6aa47b89/swift-recon-cron/0.log" Oct 08 08:24:23 crc kubenswrapper[4693]: I1008 08:24:23.152491 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-vrbbh_be7009a4-69bd-41cc-8fe8-02e5d79db395/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:23 crc kubenswrapper[4693]: I1008 08:24:23.309093 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_125016ff-a340-49c8-8c6f-9eed2093e1af/tempest-tests-tempest-tests-runner/0.log" Oct 08 08:24:23 crc kubenswrapper[4693]: I1008 08:24:23.465110 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_fed84ccb-a071-4df4-a6d4-1e5c227a609c/test-operator-logs-container/0.log" Oct 08 08:24:23 crc kubenswrapper[4693]: I1008 08:24:23.489314 4693 patch_prober.go:28] interesting 
pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:24:23 crc kubenswrapper[4693]: I1008 08:24:23.489382 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:24:23 crc kubenswrapper[4693]: I1008 08:24:23.489437 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 08:24:23 crc kubenswrapper[4693]: I1008 08:24:23.490272 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f7ef691dbf7078b04c5820d01b35339f572409288d9df3c27d07056f58129fe4"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 08 08:24:23 crc kubenswrapper[4693]: I1008 08:24:23.490341 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" containerID="cri-o://f7ef691dbf7078b04c5820d01b35339f572409288d9df3c27d07056f58129fe4" gracePeriod=600 Oct 08 08:24:23 crc kubenswrapper[4693]: I1008 08:24:23.636341 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-ckmbz_4b1ce098-43e7-44eb-8416-806097ba000e/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 08 08:24:24 crc kubenswrapper[4693]: I1008 08:24:24.347640 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="f7ef691dbf7078b04c5820d01b35339f572409288d9df3c27d07056f58129fe4" exitCode=0 Oct 08 08:24:24 crc kubenswrapper[4693]: I1008 08:24:24.347966 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"f7ef691dbf7078b04c5820d01b35339f572409288d9df3c27d07056f58129fe4"} Oct 08 08:24:24 crc kubenswrapper[4693]: I1008 08:24:24.347995 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerStarted","Data":"fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb"} Oct 08 08:24:24 crc kubenswrapper[4693]: I1008 08:24:24.348011 4693 scope.go:117] "RemoveContainer" containerID="28d5c519546455b87401c23a7a92c48a144ef9e2491253bfafff8086a83c852b" Oct 08 08:24:31 crc kubenswrapper[4693]: I1008 08:24:31.318507 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_225a20e0-eec7-4b8c-89e1-b4a2ebb513a3/memcached/0.log" Oct 08 08:24:51 crc kubenswrapper[4693]: I1008 08:24:51.605056 4693 generic.go:334] "Generic (PLEG): container finished" podID="46be0fcc-a651-4a31-94a5-e2cdb1eb4775" containerID="cd1f35455fb74e7192b764270b78c63ae546eb112bcca03e3809bbc63f84033a" exitCode=0 Oct 08 
08:24:51 crc kubenswrapper[4693]: I1008 08:24:51.605116 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" event={"ID":"46be0fcc-a651-4a31-94a5-e2cdb1eb4775","Type":"ContainerDied","Data":"cd1f35455fb74e7192b764270b78c63ae546eb112bcca03e3809bbc63f84033a"} Oct 08 08:24:52 crc kubenswrapper[4693]: I1008 08:24:52.709900 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" Oct 08 08:24:52 crc kubenswrapper[4693]: I1008 08:24:52.716209 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46be0fcc-a651-4a31-94a5-e2cdb1eb4775-host\") pod \"46be0fcc-a651-4a31-94a5-e2cdb1eb4775\" (UID: \"46be0fcc-a651-4a31-94a5-e2cdb1eb4775\") " Oct 08 08:24:52 crc kubenswrapper[4693]: I1008 08:24:52.716512 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7mdn\" (UniqueName: \"kubernetes.io/projected/46be0fcc-a651-4a31-94a5-e2cdb1eb4775-kube-api-access-f7mdn\") pod \"46be0fcc-a651-4a31-94a5-e2cdb1eb4775\" (UID: \"46be0fcc-a651-4a31-94a5-e2cdb1eb4775\") " Oct 08 08:24:52 crc kubenswrapper[4693]: I1008 08:24:52.716528 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46be0fcc-a651-4a31-94a5-e2cdb1eb4775-host" (OuterVolumeSpecName: "host") pod "46be0fcc-a651-4a31-94a5-e2cdb1eb4775" (UID: "46be0fcc-a651-4a31-94a5-e2cdb1eb4775"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 08:24:52 crc kubenswrapper[4693]: I1008 08:24:52.717159 4693 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46be0fcc-a651-4a31-94a5-e2cdb1eb4775-host\") on node \"crc\" DevicePath \"\"" Oct 08 08:24:52 crc kubenswrapper[4693]: I1008 08:24:52.735114 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46be0fcc-a651-4a31-94a5-e2cdb1eb4775-kube-api-access-f7mdn" (OuterVolumeSpecName: "kube-api-access-f7mdn") pod "46be0fcc-a651-4a31-94a5-e2cdb1eb4775" (UID: "46be0fcc-a651-4a31-94a5-e2cdb1eb4775"). InnerVolumeSpecName "kube-api-access-f7mdn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:24:52 crc kubenswrapper[4693]: I1008 08:24:52.765659 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rtcj7/crc-debug-hw2jv"] Oct 08 08:24:52 crc kubenswrapper[4693]: I1008 08:24:52.774717 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rtcj7/crc-debug-hw2jv"] Oct 08 08:24:52 crc kubenswrapper[4693]: I1008 08:24:52.819626 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7mdn\" (UniqueName: \"kubernetes.io/projected/46be0fcc-a651-4a31-94a5-e2cdb1eb4775-kube-api-access-f7mdn\") on node \"crc\" DevicePath \"\"" Oct 08 08:24:53 crc kubenswrapper[4693]: I1008 08:24:53.394411 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46be0fcc-a651-4a31-94a5-e2cdb1eb4775" path="/var/lib/kubelet/pods/46be0fcc-a651-4a31-94a5-e2cdb1eb4775/volumes" Oct 08 08:24:53 crc kubenswrapper[4693]: E1008 08:24:53.495745 4693 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46be0fcc_a651_4a31_94a5_e2cdb1eb4775.slice/crio-23b5b7b5d1875d70acc78831324a1915e52719b46eb9c249c7168bdcfa0a5dc1\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46be0fcc_a651_4a31_94a5_e2cdb1eb4775.slice\": RecentStats: unable to find data in memory cache]" Oct 08 08:24:53 crc kubenswrapper[4693]: I1008 08:24:53.630255 4693 scope.go:117] "RemoveContainer" containerID="cd1f35455fb74e7192b764270b78c63ae546eb112bcca03e3809bbc63f84033a" Oct 08 08:24:53 crc kubenswrapper[4693]: I1008 08:24:53.630636 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rtcj7/crc-debug-hw2jv" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.001937 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rtcj7/crc-debug-jzvf4"] Oct 08 08:24:54 crc kubenswrapper[4693]: E1008 08:24:54.002796 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46be0fcc-a651-4a31-94a5-e2cdb1eb4775" containerName="container-00" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.002843 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="46be0fcc-a651-4a31-94a5-e2cdb1eb4775" containerName="container-00" Oct 08 08:24:54 crc kubenswrapper[4693]: E1008 08:24:54.002878 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce099887-fefd-439c-87a7-de8a4e7c6862" containerName="extract-content" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.002889 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce099887-fefd-439c-87a7-de8a4e7c6862" containerName="extract-content" Oct 08 08:24:54 crc kubenswrapper[4693]: E1008 08:24:54.002919 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce099887-fefd-439c-87a7-de8a4e7c6862" containerName="registry-server" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.002931 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce099887-fefd-439c-87a7-de8a4e7c6862" containerName="registry-server" Oct 08 08:24:54 crc kubenswrapper[4693]: E1008 08:24:54.002951 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce099887-fefd-439c-87a7-de8a4e7c6862" containerName="extract-utilities" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.002963 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce099887-fefd-439c-87a7-de8a4e7c6862" containerName="extract-utilities" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.003298 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce099887-fefd-439c-87a7-de8a4e7c6862" containerName="registry-server" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.003342 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="46be0fcc-a651-4a31-94a5-e2cdb1eb4775" containerName="container-00" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.004322 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.006262 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-rtcj7"/"default-dockercfg-rqpq8" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.145347 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c4746baa-0359-4b63-a3a6-0ab3e1a52338-host\") pod \"crc-debug-jzvf4\" (UID: \"c4746baa-0359-4b63-a3a6-0ab3e1a52338\") " pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.145494 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmhjc\" (UniqueName: \"kubernetes.io/projected/c4746baa-0359-4b63-a3a6-0ab3e1a52338-kube-api-access-qmhjc\") pod \"crc-debug-jzvf4\" (UID: \"c4746baa-0359-4b63-a3a6-0ab3e1a52338\") " pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.247546 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c4746baa-0359-4b63-a3a6-0ab3e1a52338-host\") pod \"crc-debug-jzvf4\" (UID: \"c4746baa-0359-4b63-a3a6-0ab3e1a52338\") " pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.247671 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmhjc\" (UniqueName: \"kubernetes.io/projected/c4746baa-0359-4b63-a3a6-0ab3e1a52338-kube-api-access-qmhjc\") pod \"crc-debug-jzvf4\" (UID: \"c4746baa-0359-4b63-a3a6-0ab3e1a52338\") " pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.247720 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c4746baa-0359-4b63-a3a6-0ab3e1a52338-host\") pod \"crc-debug-jzvf4\" (UID: \"c4746baa-0359-4b63-a3a6-0ab3e1a52338\") " pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.549647 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmhjc\" (UniqueName: \"kubernetes.io/projected/c4746baa-0359-4b63-a3a6-0ab3e1a52338-kube-api-access-qmhjc\") pod \"crc-debug-jzvf4\" (UID: \"c4746baa-0359-4b63-a3a6-0ab3e1a52338\") " pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" Oct 08 08:24:54 crc kubenswrapper[4693]: I1008 08:24:54.621274 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" Oct 08 08:24:55 crc kubenswrapper[4693]: I1008 08:24:55.661613 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" event={"ID":"c4746baa-0359-4b63-a3a6-0ab3e1a52338","Type":"ContainerStarted","Data":"d43495220dca863e9c4b2dc77814206360923a98f4892894c2228b7766adb654"} Oct 08 08:24:55 crc kubenswrapper[4693]: I1008 08:24:55.661686 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" event={"ID":"c4746baa-0359-4b63-a3a6-0ab3e1a52338","Type":"ContainerStarted","Data":"e6e0f98e523632098dd26029de08b4984708d0bb893b6485509835e8ddd4307e"} Oct 08 08:24:55 crc kubenswrapper[4693]: I1008 08:24:55.686881 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" podStartSLOduration=2.686855634 podStartE2EDuration="2.686855634s" podCreationTimestamp="2025-10-08 08:24:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-08 08:24:55.684364718 +0000 UTC m=+4081.055329683" watchObservedRunningTime="2025-10-08 08:24:55.686855634 +0000 UTC m=+4081.057820579" Oct 08 08:24:56 crc kubenswrapper[4693]: I1008 08:24:56.671584 4693 generic.go:334] "Generic (PLEG): container finished" podID="c4746baa-0359-4b63-a3a6-0ab3e1a52338" containerID="d43495220dca863e9c4b2dc77814206360923a98f4892894c2228b7766adb654" exitCode=0 Oct 08 08:24:56 crc kubenswrapper[4693]: I1008 08:24:56.671665 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" event={"ID":"c4746baa-0359-4b63-a3a6-0ab3e1a52338","Type":"ContainerDied","Data":"d43495220dca863e9c4b2dc77814206360923a98f4892894c2228b7766adb654"} Oct 08 08:24:57 crc kubenswrapper[4693]: I1008 08:24:57.781331 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" Oct 08 08:24:57 crc kubenswrapper[4693]: I1008 08:24:57.920691 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c4746baa-0359-4b63-a3a6-0ab3e1a52338-host\") pod \"c4746baa-0359-4b63-a3a6-0ab3e1a52338\" (UID: \"c4746baa-0359-4b63-a3a6-0ab3e1a52338\") " Oct 08 08:24:57 crc kubenswrapper[4693]: I1008 08:24:57.920774 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmhjc\" (UniqueName: \"kubernetes.io/projected/c4746baa-0359-4b63-a3a6-0ab3e1a52338-kube-api-access-qmhjc\") pod \"c4746baa-0359-4b63-a3a6-0ab3e1a52338\" (UID: \"c4746baa-0359-4b63-a3a6-0ab3e1a52338\") " Oct 08 08:24:57 crc kubenswrapper[4693]: I1008 08:24:57.921421 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c4746baa-0359-4b63-a3a6-0ab3e1a52338-host" (OuterVolumeSpecName: "host") pod "c4746baa-0359-4b63-a3a6-0ab3e1a52338" (UID: "c4746baa-0359-4b63-a3a6-0ab3e1a52338"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 08:24:57 crc kubenswrapper[4693]: I1008 08:24:57.921945 4693 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c4746baa-0359-4b63-a3a6-0ab3e1a52338-host\") on node \"crc\" DevicePath \"\"" Oct 08 08:24:57 crc kubenswrapper[4693]: I1008 08:24:57.929665 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4746baa-0359-4b63-a3a6-0ab3e1a52338-kube-api-access-qmhjc" (OuterVolumeSpecName: "kube-api-access-qmhjc") pod "c4746baa-0359-4b63-a3a6-0ab3e1a52338" (UID: "c4746baa-0359-4b63-a3a6-0ab3e1a52338"). InnerVolumeSpecName "kube-api-access-qmhjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:24:58 crc kubenswrapper[4693]: I1008 08:24:58.024657 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmhjc\" (UniqueName: \"kubernetes.io/projected/c4746baa-0359-4b63-a3a6-0ab3e1a52338-kube-api-access-qmhjc\") on node \"crc\" DevicePath \"\"" Oct 08 08:24:58 crc kubenswrapper[4693]: I1008 08:24:58.693355 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" event={"ID":"c4746baa-0359-4b63-a3a6-0ab3e1a52338","Type":"ContainerDied","Data":"e6e0f98e523632098dd26029de08b4984708d0bb893b6485509835e8ddd4307e"} Oct 08 08:24:58 crc kubenswrapper[4693]: I1008 08:24:58.693399 4693 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6e0f98e523632098dd26029de08b4984708d0bb893b6485509835e8ddd4307e" Oct 08 08:24:58 crc kubenswrapper[4693]: I1008 08:24:58.693458 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rtcj7/crc-debug-jzvf4" Oct 08 08:25:02 crc kubenswrapper[4693]: I1008 08:25:02.493935 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rtcj7/crc-debug-jzvf4"] Oct 08 08:25:02 crc kubenswrapper[4693]: I1008 08:25:02.509076 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rtcj7/crc-debug-jzvf4"] Oct 08 08:25:03 crc kubenswrapper[4693]: I1008 08:25:03.373467 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4746baa-0359-4b63-a3a6-0ab3e1a52338" path="/var/lib/kubelet/pods/c4746baa-0359-4b63-a3a6-0ab3e1a52338/volumes" Oct 08 08:25:03 crc kubenswrapper[4693]: I1008 08:25:03.876869 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rtcj7/crc-debug-l9cdw"] Oct 08 08:25:03 crc kubenswrapper[4693]: E1008 08:25:03.877568 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4746baa-0359-4b63-a3a6-0ab3e1a52338" containerName="container-00" Oct 08 08:25:03 crc kubenswrapper[4693]: I1008 08:25:03.877581 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4746baa-0359-4b63-a3a6-0ab3e1a52338" containerName="container-00" Oct 08 08:25:03 crc kubenswrapper[4693]: I1008 08:25:03.877781 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4746baa-0359-4b63-a3a6-0ab3e1a52338" containerName="container-00" Oct 08 08:25:03 crc kubenswrapper[4693]: I1008 08:25:03.878465 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rtcj7/crc-debug-l9cdw" Oct 08 08:25:03 crc kubenswrapper[4693]: I1008 08:25:03.881673 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-rtcj7"/"default-dockercfg-rqpq8" Oct 08 08:25:03 crc kubenswrapper[4693]: I1008 08:25:03.937897 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5brj\" (UniqueName: \"kubernetes.io/projected/e3838aad-d20c-4a45-9a03-377f22273bfc-kube-api-access-j5brj\") pod \"crc-debug-l9cdw\" (UID: \"e3838aad-d20c-4a45-9a03-377f22273bfc\") " pod="openshift-must-gather-rtcj7/crc-debug-l9cdw" Oct 08 08:25:03 crc kubenswrapper[4693]: I1008 08:25:03.937956 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e3838aad-d20c-4a45-9a03-377f22273bfc-host\") pod \"crc-debug-l9cdw\" (UID: \"e3838aad-d20c-4a45-9a03-377f22273bfc\") " pod="openshift-must-gather-rtcj7/crc-debug-l9cdw" Oct 08 08:25:04 crc kubenswrapper[4693]: I1008 08:25:04.040629 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5brj\" (UniqueName: \"kubernetes.io/projected/e3838aad-d20c-4a45-9a03-377f22273bfc-kube-api-access-j5brj\") pod \"crc-debug-l9cdw\" (UID: \"e3838aad-d20c-4a45-9a03-377f22273bfc\") " pod="openshift-must-gather-rtcj7/crc-debug-l9cdw" Oct 08 08:25:04 crc kubenswrapper[4693]: I1008 08:25:04.041165 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e3838aad-d20c-4a45-9a03-377f22273bfc-host\") pod \"crc-debug-l9cdw\" (UID: \"e3838aad-d20c-4a45-9a03-377f22273bfc\") " pod="openshift-must-gather-rtcj7/crc-debug-l9cdw" Oct 08 08:25:04 crc kubenswrapper[4693]: I1008 08:25:04.041302 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e3838aad-d20c-4a45-9a03-377f22273bfc-host\") pod \"crc-debug-l9cdw\" (UID: \"e3838aad-d20c-4a45-9a03-377f22273bfc\") " pod="openshift-must-gather-rtcj7/crc-debug-l9cdw" Oct 08 08:25:04 crc kubenswrapper[4693]: I1008 08:25:04.062726 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5brj\" (UniqueName: \"kubernetes.io/projected/e3838aad-d20c-4a45-9a03-377f22273bfc-kube-api-access-j5brj\") pod \"crc-debug-l9cdw\" (UID: \"e3838aad-d20c-4a45-9a03-377f22273bfc\") " pod="openshift-must-gather-rtcj7/crc-debug-l9cdw" Oct 08 08:25:04 crc kubenswrapper[4693]: I1008 08:25:04.202225 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rtcj7/crc-debug-l9cdw" Oct 08 08:25:04 crc kubenswrapper[4693]: I1008 08:25:04.749267 4693 generic.go:334] "Generic (PLEG): container finished" podID="e3838aad-d20c-4a45-9a03-377f22273bfc" containerID="3fc535f5a8d5e5cd81ae9efd2c698335b9e535474f51ebdc265dcc6aaac842b8" exitCode=0 Oct 08 08:25:04 crc kubenswrapper[4693]: I1008 08:25:04.749362 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/crc-debug-l9cdw" event={"ID":"e3838aad-d20c-4a45-9a03-377f22273bfc","Type":"ContainerDied","Data":"3fc535f5a8d5e5cd81ae9efd2c698335b9e535474f51ebdc265dcc6aaac842b8"} Oct 08 08:25:04 crc kubenswrapper[4693]: I1008 08:25:04.749712 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/crc-debug-l9cdw" event={"ID":"e3838aad-d20c-4a45-9a03-377f22273bfc","Type":"ContainerStarted","Data":"bc48e59a701494acfe20eefaf807ab50804e6d1146b550861af4dab244385e27"} Oct 08 08:25:04 crc kubenswrapper[4693]: I1008 08:25:04.798315 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rtcj7/crc-debug-l9cdw"] Oct 08 08:25:04 crc kubenswrapper[4693]: I1008 08:25:04.806826 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rtcj7/crc-debug-l9cdw"] Oct 08 08:25:06 crc kubenswrapper[4693]: I1008 08:25:06.357862 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rtcj7/crc-debug-l9cdw" Oct 08 08:25:06 crc kubenswrapper[4693]: I1008 08:25:06.393417 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5brj\" (UniqueName: \"kubernetes.io/projected/e3838aad-d20c-4a45-9a03-377f22273bfc-kube-api-access-j5brj\") pod \"e3838aad-d20c-4a45-9a03-377f22273bfc\" (UID: \"e3838aad-d20c-4a45-9a03-377f22273bfc\") " Oct 08 08:25:06 crc kubenswrapper[4693]: I1008 08:25:06.393627 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e3838aad-d20c-4a45-9a03-377f22273bfc-host\") pod \"e3838aad-d20c-4a45-9a03-377f22273bfc\" (UID: \"e3838aad-d20c-4a45-9a03-377f22273bfc\") " Oct 08 08:25:06 crc kubenswrapper[4693]: I1008 08:25:06.393785 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3838aad-d20c-4a45-9a03-377f22273bfc-host" (OuterVolumeSpecName: "host") pod "e3838aad-d20c-4a45-9a03-377f22273bfc" (UID: "e3838aad-d20c-4a45-9a03-377f22273bfc"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 08 08:25:06 crc kubenswrapper[4693]: I1008 08:25:06.394304 4693 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e3838aad-d20c-4a45-9a03-377f22273bfc-host\") on node \"crc\" DevicePath \"\"" Oct 08 08:25:06 crc kubenswrapper[4693]: I1008 08:25:06.402122 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3838aad-d20c-4a45-9a03-377f22273bfc-kube-api-access-j5brj" (OuterVolumeSpecName: "kube-api-access-j5brj") pod "e3838aad-d20c-4a45-9a03-377f22273bfc" (UID: "e3838aad-d20c-4a45-9a03-377f22273bfc"). InnerVolumeSpecName "kube-api-access-j5brj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:25:06 crc kubenswrapper[4693]: I1008 08:25:06.496953 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5brj\" (UniqueName: \"kubernetes.io/projected/e3838aad-d20c-4a45-9a03-377f22273bfc-kube-api-access-j5brj\") on node \"crc\" DevicePath \"\"" Oct 08 08:25:06 crc kubenswrapper[4693]: I1008 08:25:06.777475 4693 scope.go:117] "RemoveContainer" containerID="3fc535f5a8d5e5cd81ae9efd2c698335b9e535474f51ebdc265dcc6aaac842b8" Oct 08 08:25:06 crc kubenswrapper[4693]: I1008 08:25:06.777535 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rtcj7/crc-debug-l9cdw" Oct 08 08:25:07 crc kubenswrapper[4693]: I1008 08:25:07.242978 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/util/0.log" Oct 08 08:25:07 crc kubenswrapper[4693]: I1008 08:25:07.377023 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3838aad-d20c-4a45-9a03-377f22273bfc" path="/var/lib/kubelet/pods/e3838aad-d20c-4a45-9a03-377f22273bfc/volumes" Oct 08 08:25:07 crc kubenswrapper[4693]: I1008 08:25:07.450473 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/pull/0.log" Oct 08 08:25:07 crc kubenswrapper[4693]: I1008 08:25:07.486007 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/pull/0.log" Oct 08 08:25:07 crc kubenswrapper[4693]: I1008 08:25:07.533065 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/util/0.log" Oct 08 08:25:07 crc kubenswrapper[4693]: I1008 08:25:07.686641 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/util/0.log" Oct 08 08:25:08 crc kubenswrapper[4693]: I1008 08:25:08.228289 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/pull/0.log" Oct 08 08:25:08 crc kubenswrapper[4693]: I1008 08:25:08.310211 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b78b27a4ac452b179bd85509b7e1a3012e65e5c7ef95898e6118adafe2f8xcn_b56656be-6638-40df-b380-d88dbc891f53/extract/0.log" Oct 08 08:25:08 crc kubenswrapper[4693]: I1008 08:25:08.314027 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-58c4cd55f4-5tq42_cd94a973-75b2-4722-a298-16e6bd67aa61/kube-rbac-proxy/0.log" Oct 08 08:25:08 crc kubenswrapper[4693]: I1008 08:25:08.501403 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-58c4cd55f4-5tq42_cd94a973-75b2-4722-a298-16e6bd67aa61/manager/0.log" Oct 08 08:25:08 crc kubenswrapper[4693]: I1008 08:25:08.506500 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7d4d4f8d-d2ztn_b855e40c-e0b0-4322-8099-d4e51c0b92f1/kube-rbac-proxy/0.log" Oct 08 08:25:08 crc kubenswrapper[4693]: I1008 08:25:08.571723 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7d4d4f8d-d2ztn_b855e40c-e0b0-4322-8099-d4e51c0b92f1/manager/0.log" Oct 08 08:25:08 crc kubenswrapper[4693]: I1008 08:25:08.710956 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-dmgt7_2f8dab68-da73-412a-bf83-95f2ac37f289/kube-rbac-proxy/0.log" Oct 08 08:25:08 crc kubenswrapper[4693]: I1008 08:25:08.762771 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-dmgt7_2f8dab68-da73-412a-bf83-95f2ac37f289/manager/0.log" Oct 08 08:25:08 crc kubenswrapper[4693]: I1008 08:25:08.894497 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5dc44df7d5-hbqzd_72f3f2ae-ba07-4045-9ac4-fc4f0dee2682/kube-rbac-proxy/0.log" Oct 08 08:25:08 crc kubenswrapper[4693]: I1008 08:25:08.972945 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5dc44df7d5-hbqzd_72f3f2ae-ba07-4045-9ac4-fc4f0dee2682/manager/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.008384 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54b4974c45-h25pk_81212063-ccc7-423c-b817-60f7280ee4f9/kube-rbac-proxy/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.111428 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54b4974c45-h25pk_81212063-ccc7-423c-b817-60f7280ee4f9/manager/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.182274 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-76d5b87f47-njkdz_bfc23a1a-faab-44e8-91f7-29d4e95f0fdc/kube-rbac-proxy/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.189142 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-76d5b87f47-njkdz_bfc23a1a-faab-44e8-91f7-29d4e95f0fdc/manager/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.329304 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-658588b8c9-6vt47_6fc858ec-6edd-4e45-ba44-fe2ea26a0614/kube-rbac-proxy/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.490583 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-649675d675-w96xx_d96bb98a-f416-4d93-b145-37632210f2f8/kube-rbac-proxy/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.520375 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-658588b8c9-6vt47_6fc858ec-6edd-4e45-ba44-fe2ea26a0614/manager/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.533048 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-649675d675-w96xx_d96bb98a-f416-4d93-b145-37632210f2f8/manager/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.627047 4693 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b5ccf6d9c-vqp2x_d26d490a-dba0-46d4-b636-836a4dde53be/kube-rbac-proxy/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.703697 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-65d89cfd9f-blmhk_b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f/kube-rbac-proxy/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.750512 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-65d89cfd9f-blmhk_b1c54c65-b7f3-4f2e-91c8-c04f25b42d3f/manager/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.754418 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b5ccf6d9c-vqp2x_d26d490a-dba0-46d4-b636-836a4dde53be/manager/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.884127 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6cd6d7bdf5-65sjq_6772aabf-b5fa-4fc7-8925-0926ed242e9b/kube-rbac-proxy/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.917406 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6cd6d7bdf5-65sjq_6772aabf-b5fa-4fc7-8925-0926ed242e9b/manager/0.log" Oct 08 08:25:09 crc kubenswrapper[4693]: I1008 08:25:09.945719 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-8d984cc4d-567tm_3fedcb35-9741-40ee-bdb0-a1d78a5da3e6/kube-rbac-proxy/0.log" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.087624 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-8d984cc4d-567tm_3fedcb35-9741-40ee-bdb0-a1d78a5da3e6/manager/0.log" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.153358 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7c7fc454ff-lhs5r_cba353e7-9050-4433-a6b6-2ca4f67d077a/kube-rbac-proxy/0.log" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.276039 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7c7fc454ff-lhs5r_cba353e7-9050-4433-a6b6-2ca4f67d077a/manager/0.log" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.365360 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7468f855d8-4zvpz_422c537a-d341-45ac-ac02-3fb221b66ed4/manager/0.log" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.366763 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7468f855d8-4zvpz_422c537a-d341-45ac-ac02-3fb221b66ed4/kube-rbac-proxy/0.log" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.488677 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w_a45ca91b-ddca-4c17-ab8b-d106345451d3/kube-rbac-proxy/0.log" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.526552 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5dfbbd665c2kg8w_a45ca91b-ddca-4c17-ab8b-d106345451d3/manager/0.log" Oct 08 08:25:10 crc kubenswrapper[4693]: 
I1008 08:25:10.579249 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7f66b9c549-m8hmw_5bfb052c-4d4a-47df-bb42-25424b56cb92/kube-rbac-proxy/0.log" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.717803 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m9pt7"] Oct 08 08:25:10 crc kubenswrapper[4693]: E1008 08:25:10.721478 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3838aad-d20c-4a45-9a03-377f22273bfc" containerName="container-00" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.721504 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3838aad-d20c-4a45-9a03-377f22273bfc" containerName="container-00" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.721767 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3838aad-d20c-4a45-9a03-377f22273bfc" containerName="container-00" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.724558 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.739332 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m9pt7"] Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.814285 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-77c8f66d44-864bb_23996d41-f11a-4a8a-8a71-3e7f93978efc/kube-rbac-proxy/0.log" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.882314 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-utilities\") pod \"certified-operators-m9pt7\" (UID: \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\") " pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.882437 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-catalog-content\") pod \"certified-operators-m9pt7\" (UID: \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\") " pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.882513 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74j4h\" (UniqueName: \"kubernetes.io/projected/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-kube-api-access-74j4h\") pod \"certified-operators-m9pt7\" (UID: \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\") " pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.921684 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-t9tkr"] Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.939131 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.945754 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t9tkr"] Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.983997 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74j4h\" (UniqueName: \"kubernetes.io/projected/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-kube-api-access-74j4h\") pod \"certified-operators-m9pt7\" (UID: \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\") " pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.984113 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-utilities\") pod \"certified-operators-m9pt7\" (UID: \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\") " pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.984160 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-catalog-content\") pod \"certified-operators-m9pt7\" (UID: \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\") " pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.984657 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-catalog-content\") pod \"certified-operators-m9pt7\" (UID: \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\") " pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.985171 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-utilities\") pod \"certified-operators-m9pt7\" (UID: \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\") " pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:10 crc kubenswrapper[4693]: I1008 08:25:10.990968 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-qbdg8_875357f9-bf15-47db-83a9-12868aca6f98/registry-server/0.log" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.003597 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74j4h\" (UniqueName: \"kubernetes.io/projected/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-kube-api-access-74j4h\") pod \"certified-operators-m9pt7\" (UID: \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\") " pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.035341 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-77c8f66d44-864bb_23996d41-f11a-4a8a-8a71-3e7f93978efc/operator/0.log" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.041956 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.086520 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81c61b4c-db85-4686-8814-f74ad46ddf5e-utilities\") pod \"community-operators-t9tkr\" (UID: \"81c61b4c-db85-4686-8814-f74ad46ddf5e\") " pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.089731 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81c61b4c-db85-4686-8814-f74ad46ddf5e-catalog-content\") pod \"community-operators-t9tkr\" (UID: \"81c61b4c-db85-4686-8814-f74ad46ddf5e\") " pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.089837 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hn94t\" (UniqueName: \"kubernetes.io/projected/81c61b4c-db85-4686-8814-f74ad46ddf5e-kube-api-access-hn94t\") pod \"community-operators-t9tkr\" (UID: \"81c61b4c-db85-4686-8814-f74ad46ddf5e\") " pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.191205 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn94t\" (UniqueName: \"kubernetes.io/projected/81c61b4c-db85-4686-8814-f74ad46ddf5e-kube-api-access-hn94t\") pod \"community-operators-t9tkr\" (UID: \"81c61b4c-db85-4686-8814-f74ad46ddf5e\") " pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.191376 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81c61b4c-db85-4686-8814-f74ad46ddf5e-utilities\") pod \"community-operators-t9tkr\" (UID: \"81c61b4c-db85-4686-8814-f74ad46ddf5e\") " pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.191430 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81c61b4c-db85-4686-8814-f74ad46ddf5e-catalog-content\") pod \"community-operators-t9tkr\" (UID: \"81c61b4c-db85-4686-8814-f74ad46ddf5e\") " pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.193108 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81c61b4c-db85-4686-8814-f74ad46ddf5e-catalog-content\") pod \"community-operators-t9tkr\" (UID: \"81c61b4c-db85-4686-8814-f74ad46ddf5e\") " pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.197171 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81c61b4c-db85-4686-8814-f74ad46ddf5e-utilities\") pod \"community-operators-t9tkr\" (UID: \"81c61b4c-db85-4686-8814-f74ad46ddf5e\") " pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.227829 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6d8b6f9b9-jtx9z_a8912110-fa72-4e6b-9c38-7b62b34772fa/kube-rbac-proxy/0.log" Oct 08 08:25:11 
crc kubenswrapper[4693]: I1008 08:25:11.257993 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn94t\" (UniqueName: \"kubernetes.io/projected/81c61b4c-db85-4686-8814-f74ad46ddf5e-kube-api-access-hn94t\") pod \"community-operators-t9tkr\" (UID: \"81c61b4c-db85-4686-8814-f74ad46ddf5e\") " pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.263864 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.661390 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6d8b6f9b9-jtx9z_a8912110-fa72-4e6b-9c38-7b62b34772fa/manager/0.log" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.695780 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m9pt7"] Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.859329 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t9tkr"] Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.869729 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m9pt7" event={"ID":"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc","Type":"ContainerStarted","Data":"cf6583d05e810a42b0c4773d1791fe9552a10f4f5f800a528d3dbb0657c59354"} Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.875964 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-54689d9f88-h2npd_9d40d3d9-711e-461b-b859-684b1af38ee9/kube-rbac-proxy/0.log" Oct 08 08:25:11 crc kubenswrapper[4693]: W1008 08:25:11.882113 4693 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81c61b4c_db85_4686_8814_f74ad46ddf5e.slice/crio-6fd6e913ce1bcd6e4bc288876f0cb718c1e3103235c30a3f3048970b792080e3 WatchSource:0}: Error finding container 6fd6e913ce1bcd6e4bc288876f0cb718c1e3103235c30a3f3048970b792080e3: Status 404 returned error can't find the container with id 6fd6e913ce1bcd6e4bc288876f0cb718c1e3103235c30a3f3048970b792080e3 Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.931400 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7f66b9c549-m8hmw_5bfb052c-4d4a-47df-bb42-25424b56cb92/manager/0.log" Oct 08 08:25:11 crc kubenswrapper[4693]: I1008 08:25:11.948119 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-54689d9f88-h2npd_9d40d3d9-711e-461b-b859-684b1af38ee9/manager/0.log" Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.099798 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-lqlgw_b2a1cce0-35c1-46ed-b375-bb70c8a7c15f/operator/0.log" Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.183494 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-7sw5d_78a7e32f-67dc-454f-b65c-8a8a2605d139/kube-rbac-proxy/0.log" Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.191492 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-7sw5d_78a7e32f-67dc-454f-b65c-8a8a2605d139/manager/0.log" Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.309460 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d4d74dd89-6h28f_d3a3ae96-9b43-42ab-b688-95e141f326f4/kube-rbac-proxy/0.log" Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.394369 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-9bz2k_f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68/kube-rbac-proxy/0.log" Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.427081 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-9bz2k_f1d2f8b8-c6f2-450b-9e54-6a14b37e7a68/manager/0.log" Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.508921 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d4d74dd89-6h28f_d3a3ae96-9b43-42ab-b688-95e141f326f4/manager/0.log" Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.577179 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6cbc6dd547-h74m5_09c1a297-4a54-430a-a78e-134db76611b9/kube-rbac-proxy/0.log" Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.628397 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6cbc6dd547-h74m5_09c1a297-4a54-430a-a78e-134db76611b9/manager/0.log" Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.878477 4693 generic.go:334] "Generic (PLEG): container finished" podID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" containerID="7cdc35ec89bb732a6fc4ad62b73d2d7812b80b6f243ba417892389bf980c667d" exitCode=0 Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.878547 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m9pt7" event={"ID":"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc","Type":"ContainerDied","Data":"7cdc35ec89bb732a6fc4ad62b73d2d7812b80b6f243ba417892389bf980c667d"} Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.882204 4693 generic.go:334] "Generic (PLEG): container finished" podID="81c61b4c-db85-4686-8814-f74ad46ddf5e" containerID="efddb7bbffba32917b2f3482befaf42ee9a745756decbacd0fffd0d9e7fe3f95" exitCode=0 Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.882238 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t9tkr" event={"ID":"81c61b4c-db85-4686-8814-f74ad46ddf5e","Type":"ContainerDied","Data":"efddb7bbffba32917b2f3482befaf42ee9a745756decbacd0fffd0d9e7fe3f95"} Oct 08 08:25:12 crc kubenswrapper[4693]: I1008 08:25:12.882263 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t9tkr" event={"ID":"81c61b4c-db85-4686-8814-f74ad46ddf5e","Type":"ContainerStarted","Data":"6fd6e913ce1bcd6e4bc288876f0cb718c1e3103235c30a3f3048970b792080e3"} Oct 08 08:25:13 crc kubenswrapper[4693]: I1008 08:25:13.899000 4693 generic.go:334] "Generic (PLEG): container finished" podID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" containerID="59817c0d2c1fbeb48255f6c75f6b8d31f4f2eb176019d1096efc3575aac39b42" exitCode=0 Oct 08 08:25:13 crc kubenswrapper[4693]: I1008 08:25:13.899590 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-m9pt7" event={"ID":"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc","Type":"ContainerDied","Data":"59817c0d2c1fbeb48255f6c75f6b8d31f4f2eb176019d1096efc3575aac39b42"} Oct 08 08:25:14 crc kubenswrapper[4693]: I1008 08:25:14.910288 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m9pt7" event={"ID":"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc","Type":"ContainerStarted","Data":"852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7"} Oct 08 08:25:14 crc kubenswrapper[4693]: I1008 08:25:14.912374 4693 generic.go:334] "Generic (PLEG): container finished" podID="81c61b4c-db85-4686-8814-f74ad46ddf5e" containerID="0fc151f5f2eb0065e1c098fb6b0fb1c8eb0363a0d5cf831b249edd1f56109561" exitCode=0 Oct 08 08:25:14 crc kubenswrapper[4693]: I1008 08:25:14.912397 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t9tkr" event={"ID":"81c61b4c-db85-4686-8814-f74ad46ddf5e","Type":"ContainerDied","Data":"0fc151f5f2eb0065e1c098fb6b0fb1c8eb0363a0d5cf831b249edd1f56109561"} Oct 08 08:25:14 crc kubenswrapper[4693]: I1008 08:25:14.929473 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m9pt7" podStartSLOduration=3.378436344 podStartE2EDuration="4.929454772s" podCreationTimestamp="2025-10-08 08:25:10 +0000 UTC" firstStartedPulling="2025-10-08 08:25:12.880502205 +0000 UTC m=+4098.251467140" lastFinishedPulling="2025-10-08 08:25:14.431520633 +0000 UTC m=+4099.802485568" observedRunningTime="2025-10-08 08:25:14.928034984 +0000 UTC m=+4100.298999919" watchObservedRunningTime="2025-10-08 08:25:14.929454772 +0000 UTC m=+4100.300419707" Oct 08 08:25:16 crc kubenswrapper[4693]: I1008 08:25:16.932572 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t9tkr" event={"ID":"81c61b4c-db85-4686-8814-f74ad46ddf5e","Type":"ContainerStarted","Data":"5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb"} Oct 08 08:25:16 crc kubenswrapper[4693]: I1008 08:25:16.951375 4693 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-t9tkr" podStartSLOduration=4.135454183 podStartE2EDuration="6.95136071s" podCreationTimestamp="2025-10-08 08:25:10 +0000 UTC" firstStartedPulling="2025-10-08 08:25:12.883695489 +0000 UTC m=+4098.254660424" lastFinishedPulling="2025-10-08 08:25:15.699602006 +0000 UTC m=+4101.070566951" observedRunningTime="2025-10-08 08:25:16.94906349 +0000 UTC m=+4102.320028425" watchObservedRunningTime="2025-10-08 08:25:16.95136071 +0000 UTC m=+4102.322325645" Oct 08 08:25:20 crc kubenswrapper[4693]: I1008 08:25:20.355447 4693 scope.go:117] "RemoveContainer" containerID="15d36d043a23583cb9317b8d9039527c1ef3893625605ac4444543338b531681" Oct 08 08:25:21 crc kubenswrapper[4693]: I1008 08:25:21.043382 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:21 crc kubenswrapper[4693]: I1008 08:25:21.043711 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:21 crc kubenswrapper[4693]: I1008 08:25:21.102232 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:21 crc kubenswrapper[4693]: I1008 08:25:21.265459 4693 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:21 crc kubenswrapper[4693]: I1008 08:25:21.265506 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:21 crc kubenswrapper[4693]: I1008 08:25:21.314865 4693 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:22 crc kubenswrapper[4693]: I1008 08:25:22.049828 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:22 crc kubenswrapper[4693]: I1008 08:25:22.049895 4693 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:23 crc kubenswrapper[4693]: I1008 08:25:23.314392 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m9pt7"] Oct 08 08:25:23 crc kubenswrapper[4693]: I1008 08:25:23.903486 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t9tkr"] Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.023108 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-t9tkr" podUID="81c61b4c-db85-4686-8814-f74ad46ddf5e" containerName="registry-server" containerID="cri-o://5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb" gracePeriod=2 Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.023168 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m9pt7" podUID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" containerName="registry-server" containerID="cri-o://852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7" gracePeriod=2 Oct 08 08:25:24 crc kubenswrapper[4693]: E1008 08:25:24.366624 4693 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81c61b4c_db85_4686_8814_f74ad46ddf5e.slice/crio-conmon-5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb.scope\": RecentStats: unable to find data in memory cache]" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.551495 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.556600 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.678670 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-catalog-content\") pod \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\" (UID: \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\") " Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.678777 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-utilities\") pod \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\" (UID: \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\") " Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.678830 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hn94t\" (UniqueName: \"kubernetes.io/projected/81c61b4c-db85-4686-8814-f74ad46ddf5e-kube-api-access-hn94t\") pod \"81c61b4c-db85-4686-8814-f74ad46ddf5e\" (UID: \"81c61b4c-db85-4686-8814-f74ad46ddf5e\") " Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.678906 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81c61b4c-db85-4686-8814-f74ad46ddf5e-catalog-content\") pod \"81c61b4c-db85-4686-8814-f74ad46ddf5e\" (UID: \"81c61b4c-db85-4686-8814-f74ad46ddf5e\") " Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.678964 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81c61b4c-db85-4686-8814-f74ad46ddf5e-utilities\") pod \"81c61b4c-db85-4686-8814-f74ad46ddf5e\" (UID: \"81c61b4c-db85-4686-8814-f74ad46ddf5e\") " Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.679005 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74j4h\" (UniqueName: \"kubernetes.io/projected/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-kube-api-access-74j4h\") pod \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\" (UID: \"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc\") " Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.679630 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81c61b4c-db85-4686-8814-f74ad46ddf5e-utilities" (OuterVolumeSpecName: "utilities") pod "81c61b4c-db85-4686-8814-f74ad46ddf5e" (UID: "81c61b4c-db85-4686-8814-f74ad46ddf5e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.681505 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-utilities" (OuterVolumeSpecName: "utilities") pod "f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" (UID: "f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.685388 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-kube-api-access-74j4h" (OuterVolumeSpecName: "kube-api-access-74j4h") pod "f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" (UID: "f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc"). InnerVolumeSpecName "kube-api-access-74j4h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.686248 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81c61b4c-db85-4686-8814-f74ad46ddf5e-kube-api-access-hn94t" (OuterVolumeSpecName: "kube-api-access-hn94t") pod "81c61b4c-db85-4686-8814-f74ad46ddf5e" (UID: "81c61b4c-db85-4686-8814-f74ad46ddf5e"). InnerVolumeSpecName "kube-api-access-hn94t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.730220 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81c61b4c-db85-4686-8814-f74ad46ddf5e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81c61b4c-db85-4686-8814-f74ad46ddf5e" (UID: "81c61b4c-db85-4686-8814-f74ad46ddf5e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.735331 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" (UID: "f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.781538 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74j4h\" (UniqueName: \"kubernetes.io/projected/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-kube-api-access-74j4h\") on node \"crc\" DevicePath \"\"" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.781597 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.781615 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.781630 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hn94t\" (UniqueName: \"kubernetes.io/projected/81c61b4c-db85-4686-8814-f74ad46ddf5e-kube-api-access-hn94t\") on node \"crc\" DevicePath \"\"" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.781643 4693 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81c61b4c-db85-4686-8814-f74ad46ddf5e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 08 08:25:24 crc kubenswrapper[4693]: I1008 08:25:24.781656 4693 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81c61b4c-db85-4686-8814-f74ad46ddf5e-utilities\") on node \"crc\" DevicePath \"\"" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.039094 4693 generic.go:334] "Generic (PLEG): container finished" podID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" containerID="852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7" exitCode=0 Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.039822 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m9pt7" 
event={"ID":"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc","Type":"ContainerDied","Data":"852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7"} Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.039971 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m9pt7" event={"ID":"f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc","Type":"ContainerDied","Data":"cf6583d05e810a42b0c4773d1791fe9552a10f4f5f800a528d3dbb0657c59354"} Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.040108 4693 scope.go:117] "RemoveContainer" containerID="852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.040347 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m9pt7" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.070429 4693 generic.go:334] "Generic (PLEG): container finished" podID="81c61b4c-db85-4686-8814-f74ad46ddf5e" containerID="5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb" exitCode=0 Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.070482 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t9tkr" event={"ID":"81c61b4c-db85-4686-8814-f74ad46ddf5e","Type":"ContainerDied","Data":"5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb"} Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.070529 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t9tkr" event={"ID":"81c61b4c-db85-4686-8814-f74ad46ddf5e","Type":"ContainerDied","Data":"6fd6e913ce1bcd6e4bc288876f0cb718c1e3103235c30a3f3048970b792080e3"} Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.070627 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t9tkr" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.091747 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m9pt7"] Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.100519 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m9pt7"] Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.116464 4693 scope.go:117] "RemoveContainer" containerID="59817c0d2c1fbeb48255f6c75f6b8d31f4f2eb176019d1096efc3575aac39b42" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.117600 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t9tkr"] Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.129676 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-t9tkr"] Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.170991 4693 scope.go:117] "RemoveContainer" containerID="7cdc35ec89bb732a6fc4ad62b73d2d7812b80b6f243ba417892389bf980c667d" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.229466 4693 scope.go:117] "RemoveContainer" containerID="852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7" Oct 08 08:25:25 crc kubenswrapper[4693]: E1008 08:25:25.230011 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7\": container with ID starting with 852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7 not found: ID does not exist" containerID="852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.230086 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7"} err="failed to get container status \"852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7\": rpc error: code = NotFound desc = could not find container \"852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7\": container with ID starting with 852936ddf536ed7550f89eb5534a736b81630eca1feefed01b73440ccea6fed7 not found: ID does not exist" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.230129 4693 scope.go:117] "RemoveContainer" containerID="59817c0d2c1fbeb48255f6c75f6b8d31f4f2eb176019d1096efc3575aac39b42" Oct 08 08:25:25 crc kubenswrapper[4693]: E1008 08:25:25.230580 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59817c0d2c1fbeb48255f6c75f6b8d31f4f2eb176019d1096efc3575aac39b42\": container with ID starting with 59817c0d2c1fbeb48255f6c75f6b8d31f4f2eb176019d1096efc3575aac39b42 not found: ID does not exist" containerID="59817c0d2c1fbeb48255f6c75f6b8d31f4f2eb176019d1096efc3575aac39b42" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.230733 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59817c0d2c1fbeb48255f6c75f6b8d31f4f2eb176019d1096efc3575aac39b42"} err="failed to get container status \"59817c0d2c1fbeb48255f6c75f6b8d31f4f2eb176019d1096efc3575aac39b42\": rpc error: code = NotFound desc = could not find container \"59817c0d2c1fbeb48255f6c75f6b8d31f4f2eb176019d1096efc3575aac39b42\": container with ID starting with 
59817c0d2c1fbeb48255f6c75f6b8d31f4f2eb176019d1096efc3575aac39b42 not found: ID does not exist" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.230876 4693 scope.go:117] "RemoveContainer" containerID="7cdc35ec89bb732a6fc4ad62b73d2d7812b80b6f243ba417892389bf980c667d" Oct 08 08:25:25 crc kubenswrapper[4693]: E1008 08:25:25.231323 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cdc35ec89bb732a6fc4ad62b73d2d7812b80b6f243ba417892389bf980c667d\": container with ID starting with 7cdc35ec89bb732a6fc4ad62b73d2d7812b80b6f243ba417892389bf980c667d not found: ID does not exist" containerID="7cdc35ec89bb732a6fc4ad62b73d2d7812b80b6f243ba417892389bf980c667d" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.231380 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cdc35ec89bb732a6fc4ad62b73d2d7812b80b6f243ba417892389bf980c667d"} err="failed to get container status \"7cdc35ec89bb732a6fc4ad62b73d2d7812b80b6f243ba417892389bf980c667d\": rpc error: code = NotFound desc = could not find container \"7cdc35ec89bb732a6fc4ad62b73d2d7812b80b6f243ba417892389bf980c667d\": container with ID starting with 7cdc35ec89bb732a6fc4ad62b73d2d7812b80b6f243ba417892389bf980c667d not found: ID does not exist" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.231408 4693 scope.go:117] "RemoveContainer" containerID="5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.270931 4693 scope.go:117] "RemoveContainer" containerID="0fc151f5f2eb0065e1c098fb6b0fb1c8eb0363a0d5cf831b249edd1f56109561" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.294075 4693 scope.go:117] "RemoveContainer" containerID="efddb7bbffba32917b2f3482befaf42ee9a745756decbacd0fffd0d9e7fe3f95" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.330132 4693 scope.go:117] "RemoveContainer" containerID="5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb" Oct 08 08:25:25 crc kubenswrapper[4693]: E1008 08:25:25.330657 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb\": container with ID starting with 5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb not found: ID does not exist" containerID="5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.330701 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb"} err="failed to get container status \"5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb\": rpc error: code = NotFound desc = could not find container \"5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb\": container with ID starting with 5e1721d09749a4d1384c4b24cb3b9fd414b1474ea1299f4e70adb6869a13fbfb not found: ID does not exist" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.330754 4693 scope.go:117] "RemoveContainer" containerID="0fc151f5f2eb0065e1c098fb6b0fb1c8eb0363a0d5cf831b249edd1f56109561" Oct 08 08:25:25 crc kubenswrapper[4693]: E1008 08:25:25.331266 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fc151f5f2eb0065e1c098fb6b0fb1c8eb0363a0d5cf831b249edd1f56109561\": container 
with ID starting with 0fc151f5f2eb0065e1c098fb6b0fb1c8eb0363a0d5cf831b249edd1f56109561 not found: ID does not exist" containerID="0fc151f5f2eb0065e1c098fb6b0fb1c8eb0363a0d5cf831b249edd1f56109561" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.331282 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fc151f5f2eb0065e1c098fb6b0fb1c8eb0363a0d5cf831b249edd1f56109561"} err="failed to get container status \"0fc151f5f2eb0065e1c098fb6b0fb1c8eb0363a0d5cf831b249edd1f56109561\": rpc error: code = NotFound desc = could not find container \"0fc151f5f2eb0065e1c098fb6b0fb1c8eb0363a0d5cf831b249edd1f56109561\": container with ID starting with 0fc151f5f2eb0065e1c098fb6b0fb1c8eb0363a0d5cf831b249edd1f56109561 not found: ID does not exist" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.331296 4693 scope.go:117] "RemoveContainer" containerID="efddb7bbffba32917b2f3482befaf42ee9a745756decbacd0fffd0d9e7fe3f95" Oct 08 08:25:25 crc kubenswrapper[4693]: E1008 08:25:25.332089 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efddb7bbffba32917b2f3482befaf42ee9a745756decbacd0fffd0d9e7fe3f95\": container with ID starting with efddb7bbffba32917b2f3482befaf42ee9a745756decbacd0fffd0d9e7fe3f95 not found: ID does not exist" containerID="efddb7bbffba32917b2f3482befaf42ee9a745756decbacd0fffd0d9e7fe3f95" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.332113 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efddb7bbffba32917b2f3482befaf42ee9a745756decbacd0fffd0d9e7fe3f95"} err="failed to get container status \"efddb7bbffba32917b2f3482befaf42ee9a745756decbacd0fffd0d9e7fe3f95\": rpc error: code = NotFound desc = could not find container \"efddb7bbffba32917b2f3482befaf42ee9a745756decbacd0fffd0d9e7fe3f95\": container with ID starting with efddb7bbffba32917b2f3482befaf42ee9a745756decbacd0fffd0d9e7fe3f95 not found: ID does not exist" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.372947 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81c61b4c-db85-4686-8814-f74ad46ddf5e" path="/var/lib/kubelet/pods/81c61b4c-db85-4686-8814-f74ad46ddf5e/volumes" Oct 08 08:25:25 crc kubenswrapper[4693]: I1008 08:25:25.374148 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" path="/var/lib/kubelet/pods/f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc/volumes" Oct 08 08:25:32 crc kubenswrapper[4693]: I1008 08:25:32.023230 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-9t8ns_c2d8737c-16dd-429e-a6e0-3d2c35877083/control-plane-machine-set-operator/0.log" Oct 08 08:25:32 crc kubenswrapper[4693]: I1008 08:25:32.124721 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dh66b_b303a16a-8059-4d90-91ac-2ba5c953f346/kube-rbac-proxy/0.log" Oct 08 08:25:32 crc kubenswrapper[4693]: I1008 08:25:32.178869 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dh66b_b303a16a-8059-4d90-91ac-2ba5c953f346/machine-api-operator/0.log" Oct 08 08:25:46 crc kubenswrapper[4693]: I1008 08:25:46.933492 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-69p7f_c4e1c031-3b17-4c19-80f7-f37b55c3cb4a/cert-manager-controller/0.log" Oct 08 
08:25:47 crc kubenswrapper[4693]: I1008 08:25:47.121178 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-55lln_3e9c884b-9e83-4f39-b92c-c278a1a08a2a/cert-manager-cainjector/0.log" Oct 08 08:25:47 crc kubenswrapper[4693]: I1008 08:25:47.149180 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-c4c8h_8d52a50f-bc7f-4317-a82a-678905b53fcc/cert-manager-webhook/0.log" Oct 08 08:26:01 crc kubenswrapper[4693]: I1008 08:26:01.526295 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-j658k_5ad24461-fc5f-44fd-94e2-68b8ef30e152/nmstate-console-plugin/0.log" Oct 08 08:26:02 crc kubenswrapper[4693]: I1008 08:26:02.375926 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-945gn_ae5b67a9-cb0d-4f73-8353-2bba4708a176/nmstate-metrics/0.log" Oct 08 08:26:02 crc kubenswrapper[4693]: I1008 08:26:02.419222 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-j9lkx_a28c017b-9170-4749-80cf-60b85681a4e7/nmstate-handler/0.log" Oct 08 08:26:02 crc kubenswrapper[4693]: I1008 08:26:02.422478 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-945gn_ae5b67a9-cb0d-4f73-8353-2bba4708a176/kube-rbac-proxy/0.log" Oct 08 08:26:02 crc kubenswrapper[4693]: I1008 08:26:02.591667 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-48qbg_55f815ee-eb64-4f69-b192-081c71664f3b/nmstate-operator/0.log" Oct 08 08:26:02 crc kubenswrapper[4693]: I1008 08:26:02.617084 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-fpzdq_69abf05d-a12d-4255-a1cf-a57efdc57a93/nmstate-webhook/0.log" Oct 08 08:26:17 crc kubenswrapper[4693]: I1008 08:26:17.451919 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-nj6r5_99056ede-9949-4966-a265-fc3af4134013/kube-rbac-proxy/0.log" Oct 08 08:26:17 crc kubenswrapper[4693]: I1008 08:26:17.515120 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-nj6r5_99056ede-9949-4966-a265-fc3af4134013/controller/0.log" Oct 08 08:26:17 crc kubenswrapper[4693]: I1008 08:26:17.635276 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-frr-files/0.log" Oct 08 08:26:17 crc kubenswrapper[4693]: I1008 08:26:17.893345 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-frr-files/0.log" Oct 08 08:26:17 crc kubenswrapper[4693]: I1008 08:26:17.930927 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-metrics/0.log" Oct 08 08:26:17 crc kubenswrapper[4693]: I1008 08:26:17.935973 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-reloader/0.log" Oct 08 08:26:17 crc kubenswrapper[4693]: I1008 08:26:17.937321 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-reloader/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.178552 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-frr-files/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.220961 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-reloader/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.245770 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-metrics/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.254524 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-metrics/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.416787 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-frr-files/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.445450 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-metrics/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.483156 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/controller/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.494591 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/cp-reloader/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.615597 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/frr-metrics/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.672406 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/kube-rbac-proxy-frr/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.737676 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/kube-rbac-proxy/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.828773 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/reloader/0.log" Oct 08 08:26:18 crc kubenswrapper[4693]: I1008 08:26:18.913149 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-4857d_b17ddd86-5c6e-4898-a859-ce43f604fc10/frr-k8s-webhook-server/0.log" Oct 08 08:26:19 crc kubenswrapper[4693]: I1008 08:26:19.127258 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6b6d7649c4-6krt4_20b2cb2b-9d01-44fa-a40e-2375df3a92d7/manager/0.log" Oct 08 08:26:19 crc kubenswrapper[4693]: I1008 08:26:19.260858 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6559c8fcd-4bbqs_9db7ae35-d57f-4342-8a8c-ff3613e28905/webhook-server/0.log" Oct 08 08:26:19 crc kubenswrapper[4693]: I1008 08:26:19.420842 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-tjkwg_88513bc0-a703-458f-a001-d6a636023c45/kube-rbac-proxy/0.log" Oct 08 08:26:19 crc kubenswrapper[4693]: I1008 08:26:19.858302 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-tjkwg_88513bc0-a703-458f-a001-d6a636023c45/speaker/0.log" Oct 08 08:26:19 crc kubenswrapper[4693]: I1008 08:26:19.991574 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jglgg_a7633c77-af8c-4789-88d5-fbbb01f3e751/frr/0.log" Oct 08 08:26:34 crc kubenswrapper[4693]: I1008 08:26:34.426279 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/util/0.log" Oct 08 08:26:34 crc kubenswrapper[4693]: I1008 08:26:34.612619 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/util/0.log" Oct 08 08:26:34 crc kubenswrapper[4693]: I1008 08:26:34.623674 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/pull/0.log" Oct 08 08:26:34 crc kubenswrapper[4693]: I1008 08:26:34.658401 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/pull/0.log" Oct 08 08:26:34 crc kubenswrapper[4693]: I1008 08:26:34.824086 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/util/0.log" Oct 08 08:26:34 crc kubenswrapper[4693]: I1008 08:26:34.831937 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/pull/0.log" Oct 08 08:26:34 crc kubenswrapper[4693]: I1008 08:26:34.848606 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w2s6j_b0fe86fd-7ab1-4654-a5b7-e4797e7b12d9/extract/0.log" Oct 08 08:26:34 crc kubenswrapper[4693]: I1008 08:26:34.995402 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/extract-utilities/0.log" Oct 08 08:26:35 crc kubenswrapper[4693]: I1008 08:26:35.152740 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/extract-content/0.log" Oct 08 08:26:35 crc kubenswrapper[4693]: I1008 08:26:35.171680 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/extract-content/0.log" Oct 08 08:26:35 crc kubenswrapper[4693]: I1008 08:26:35.182132 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/extract-utilities/0.log" Oct 08 08:26:35 crc kubenswrapper[4693]: I1008 08:26:35.341142 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/extract-content/0.log" Oct 08 08:26:35 crc kubenswrapper[4693]: I1008 08:26:35.347018 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/extract-utilities/0.log" Oct 08 08:26:35 crc kubenswrapper[4693]: I1008 08:26:35.546662 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/extract-utilities/0.log" Oct 08 08:26:35 crc kubenswrapper[4693]: I1008 08:26:35.858173 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-n4jbg_5331821d-f991-4245-9a76-c889657a38b8/registry-server/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.064013 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/extract-utilities/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.103826 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/extract-content/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.164992 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/extract-content/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.281195 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/extract-content/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.291333 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/extract-utilities/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.508409 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/util/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.688950 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/util/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.697972 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/pull/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.748697 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vt599_f4f1924c-7d6d-4c94-9903-33499a98ffb1/registry-server/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.809169 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/pull/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.914897 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/extract/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.924865 4693 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/pull/0.log" Oct 08 08:26:36 crc kubenswrapper[4693]: I1008 08:26:36.945459 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ct69x2_2378198d-e3ee-4cdc-a298-0d386fdf78ae/util/0.log" Oct 08 08:26:37 crc kubenswrapper[4693]: I1008 08:26:37.092394 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-nkdmr_9c82bb62-a293-463f-ba14-c6fcf26e3a90/marketplace-operator/0.log" Oct 08 08:26:37 crc kubenswrapper[4693]: I1008 08:26:37.125246 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/extract-utilities/0.log" Oct 08 08:26:37 crc kubenswrapper[4693]: I1008 08:26:37.280485 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/extract-content/0.log" Oct 08 08:26:37 crc kubenswrapper[4693]: I1008 08:26:37.293937 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/extract-utilities/0.log" Oct 08 08:26:37 crc kubenswrapper[4693]: I1008 08:26:37.314490 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/extract-content/0.log" Oct 08 08:26:37 crc kubenswrapper[4693]: I1008 08:26:37.467250 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/extract-content/0.log" Oct 08 08:26:37 crc kubenswrapper[4693]: I1008 08:26:37.473806 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/extract-utilities/0.log" Oct 08 08:26:37 crc kubenswrapper[4693]: I1008 08:26:37.526299 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/extract-utilities/0.log" Oct 08 08:26:37 crc kubenswrapper[4693]: I1008 08:26:37.655533 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-sz5qh_fa717bec-7159-42c4-98b4-65eca2bd583b/registry-server/0.log" Oct 08 08:26:37 crc kubenswrapper[4693]: I1008 08:26:37.903990 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/extract-content/0.log" Oct 08 08:26:37 crc kubenswrapper[4693]: I1008 08:26:37.911076 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/extract-utilities/0.log" Oct 08 08:26:37 crc kubenswrapper[4693]: I1008 08:26:37.915293 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/extract-content/0.log" Oct 08 08:26:38 crc kubenswrapper[4693]: I1008 08:26:38.051860 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/extract-content/0.log" Oct 08 08:26:38 crc kubenswrapper[4693]: I1008 08:26:38.051910 4693 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/extract-utilities/0.log" Oct 08 08:26:38 crc kubenswrapper[4693]: I1008 08:26:38.578181 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ccgvl_a44ec819-82cb-4ac2-8ac5-49dbc0cb8e51/registry-server/0.log" Oct 08 08:26:53 crc kubenswrapper[4693]: I1008 08:26:53.489600 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:26:53 crc kubenswrapper[4693]: I1008 08:26:53.490218 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:27:23 crc kubenswrapper[4693]: I1008 08:27:23.490393 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:27:23 crc kubenswrapper[4693]: I1008 08:27:23.491759 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:27:53 crc kubenswrapper[4693]: I1008 08:27:53.489377 4693 patch_prober.go:28] interesting pod/machine-config-daemon-xwrvr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 08 08:27:53 crc kubenswrapper[4693]: I1008 08:27:53.490004 4693 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 08 08:27:53 crc kubenswrapper[4693]: I1008 08:27:53.490062 4693 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" Oct 08 08:27:53 crc kubenswrapper[4693]: I1008 08:27:53.490867 4693 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb"} pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 08 08:27:53 crc kubenswrapper[4693]: I1008 08:27:53.490954 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" 
containerName="machine-config-daemon" containerID="cri-o://fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb" gracePeriod=600 Oct 08 08:27:53 crc kubenswrapper[4693]: E1008 08:27:53.617265 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:27:53 crc kubenswrapper[4693]: I1008 08:27:53.643930 4693 generic.go:334] "Generic (PLEG): container finished" podID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" containerID="fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb" exitCode=0 Oct 08 08:27:53 crc kubenswrapper[4693]: I1008 08:27:53.643995 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" event={"ID":"adebc2b6-0bd4-4c1c-8b8f-68a98012f490","Type":"ContainerDied","Data":"fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb"} Oct 08 08:27:53 crc kubenswrapper[4693]: I1008 08:27:53.644059 4693 scope.go:117] "RemoveContainer" containerID="f7ef691dbf7078b04c5820d01b35339f572409288d9df3c27d07056f58129fe4" Oct 08 08:27:53 crc kubenswrapper[4693]: I1008 08:27:53.645003 4693 scope.go:117] "RemoveContainer" containerID="fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb" Oct 08 08:27:53 crc kubenswrapper[4693]: E1008 08:27:53.645553 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:28:08 crc kubenswrapper[4693]: I1008 08:28:08.363530 4693 scope.go:117] "RemoveContainer" containerID="fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb" Oct 08 08:28:08 crc kubenswrapper[4693]: E1008 08:28:08.364590 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:28:21 crc kubenswrapper[4693]: I1008 08:28:21.363629 4693 scope.go:117] "RemoveContainer" containerID="fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb" Oct 08 08:28:21 crc kubenswrapper[4693]: E1008 08:28:21.365075 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:28:33 crc kubenswrapper[4693]: I1008 08:28:33.131771 4693 generic.go:334] "Generic (PLEG): container finished" 
podID="f8598397-df5e-4a4b-ac22-4348239ab87f" containerID="e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4" exitCode=0 Oct 08 08:28:33 crc kubenswrapper[4693]: I1008 08:28:33.131896 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rtcj7/must-gather-jd4b4" event={"ID":"f8598397-df5e-4a4b-ac22-4348239ab87f","Type":"ContainerDied","Data":"e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4"} Oct 08 08:28:33 crc kubenswrapper[4693]: I1008 08:28:33.133167 4693 scope.go:117] "RemoveContainer" containerID="e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4" Oct 08 08:28:33 crc kubenswrapper[4693]: I1008 08:28:33.590098 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rtcj7_must-gather-jd4b4_f8598397-df5e-4a4b-ac22-4348239ab87f/gather/0.log" Oct 08 08:28:36 crc kubenswrapper[4693]: I1008 08:28:36.366120 4693 scope.go:117] "RemoveContainer" containerID="fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb" Oct 08 08:28:36 crc kubenswrapper[4693]: E1008 08:28:36.366987 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490" Oct 08 08:28:45 crc kubenswrapper[4693]: I1008 08:28:45.357080 4693 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rtcj7/must-gather-jd4b4"] Oct 08 08:28:45 crc kubenswrapper[4693]: I1008 08:28:45.358315 4693 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-rtcj7/must-gather-jd4b4" podUID="f8598397-df5e-4a4b-ac22-4348239ab87f" containerName="copy" containerID="cri-o://27dd57459d447c99202c691db833fcf89e50d3dc7d31c67ef6fa13e91ed1263b" gracePeriod=2 Oct 08 08:28:45 crc kubenswrapper[4693]: I1008 08:28:45.385174 4693 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rtcj7/must-gather-jd4b4"] Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.226957 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rtcj7_must-gather-jd4b4_f8598397-df5e-4a4b-ac22-4348239ab87f/copy/0.log" Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.227612 4693 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rtcj7/must-gather-jd4b4" Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.265310 4693 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rtcj7_must-gather-jd4b4_f8598397-df5e-4a4b-ac22-4348239ab87f/copy/0.log" Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.265626 4693 generic.go:334] "Generic (PLEG): container finished" podID="f8598397-df5e-4a4b-ac22-4348239ab87f" containerID="27dd57459d447c99202c691db833fcf89e50d3dc7d31c67ef6fa13e91ed1263b" exitCode=143 Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.265691 4693 scope.go:117] "RemoveContainer" containerID="27dd57459d447c99202c691db833fcf89e50d3dc7d31c67ef6fa13e91ed1263b" Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.265694 4693 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rtcj7/must-gather-jd4b4" Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.284051 4693 scope.go:117] "RemoveContainer" containerID="e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4" Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.348606 4693 scope.go:117] "RemoveContainer" containerID="27dd57459d447c99202c691db833fcf89e50d3dc7d31c67ef6fa13e91ed1263b" Oct 08 08:28:46 crc kubenswrapper[4693]: E1008 08:28:46.358284 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27dd57459d447c99202c691db833fcf89e50d3dc7d31c67ef6fa13e91ed1263b\": container with ID starting with 27dd57459d447c99202c691db833fcf89e50d3dc7d31c67ef6fa13e91ed1263b not found: ID does not exist" containerID="27dd57459d447c99202c691db833fcf89e50d3dc7d31c67ef6fa13e91ed1263b" Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.358326 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27dd57459d447c99202c691db833fcf89e50d3dc7d31c67ef6fa13e91ed1263b"} err="failed to get container status \"27dd57459d447c99202c691db833fcf89e50d3dc7d31c67ef6fa13e91ed1263b\": rpc error: code = NotFound desc = could not find container \"27dd57459d447c99202c691db833fcf89e50d3dc7d31c67ef6fa13e91ed1263b\": container with ID starting with 27dd57459d447c99202c691db833fcf89e50d3dc7d31c67ef6fa13e91ed1263b not found: ID does not exist" Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.358353 4693 scope.go:117] "RemoveContainer" containerID="e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4" Oct 08 08:28:46 crc kubenswrapper[4693]: E1008 08:28:46.361150 4693 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4\": container with ID starting with e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4 not found: ID does not exist" containerID="e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4" Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.361207 4693 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4"} err="failed to get container status \"e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4\": rpc error: code = NotFound desc = could not find container \"e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4\": container with ID starting with e422207ba46edc0b86cde1c31b351cf9cb0b405708b093a37d6c6870ba353cb4 not found: ID does not exist" Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.407266 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f8598397-df5e-4a4b-ac22-4348239ab87f-must-gather-output\") pod \"f8598397-df5e-4a4b-ac22-4348239ab87f\" (UID: \"f8598397-df5e-4a4b-ac22-4348239ab87f\") " Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.407378 4693 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82dh2\" (UniqueName: \"kubernetes.io/projected/f8598397-df5e-4a4b-ac22-4348239ab87f-kube-api-access-82dh2\") pod \"f8598397-df5e-4a4b-ac22-4348239ab87f\" (UID: \"f8598397-df5e-4a4b-ac22-4348239ab87f\") " Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.442195 4693 
Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.514629 4693 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82dh2\" (UniqueName: \"kubernetes.io/projected/f8598397-df5e-4a4b-ac22-4348239ab87f-kube-api-access-82dh2\") on node \"crc\" DevicePath \"\""
Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.600975 4693 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8598397-df5e-4a4b-ac22-4348239ab87f-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "f8598397-df5e-4a4b-ac22-4348239ab87f" (UID: "f8598397-df5e-4a4b-ac22-4348239ab87f"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 08 08:28:46 crc kubenswrapper[4693]: I1008 08:28:46.616802 4693 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f8598397-df5e-4a4b-ac22-4348239ab87f-must-gather-output\") on node \"crc\" DevicePath \"\""
Oct 08 08:28:47 crc kubenswrapper[4693]: I1008 08:28:47.376365 4693 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8598397-df5e-4a4b-ac22-4348239ab87f" path="/var/lib/kubelet/pods/f8598397-df5e-4a4b-ac22-4348239ab87f/volumes"
Oct 08 08:28:48 crc kubenswrapper[4693]: I1008 08:28:48.363166 4693 scope.go:117] "RemoveContainer" containerID="fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb"
Oct 08 08:28:48 crc kubenswrapper[4693]: E1008 08:28:48.363408 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 08:29:02 crc kubenswrapper[4693]: I1008 08:29:02.363809 4693 scope.go:117] "RemoveContainer" containerID="fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb"
Oct 08 08:29:02 crc kubenswrapper[4693]: E1008 08:29:02.365262 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 08:29:14 crc kubenswrapper[4693]: I1008 08:29:14.363582 4693 scope.go:117] "RemoveContainer" containerID="fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb"
Oct 08 08:29:14 crc kubenswrapper[4693]: E1008 08:29:14.364718 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 08:29:29 crc kubenswrapper[4693]: I1008 08:29:29.363779 4693 scope.go:117] "RemoveContainer" containerID="fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb"
Oct 08 08:29:29 crc kubenswrapper[4693]: E1008 08:29:29.365169 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 08:29:41 crc kubenswrapper[4693]: I1008 08:29:41.363845 4693 scope.go:117] "RemoveContainer" containerID="fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb"
Oct 08 08:29:41 crc kubenswrapper[4693]: E1008 08:29:41.364846 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 08:29:53 crc kubenswrapper[4693]: I1008 08:29:53.363700 4693 scope.go:117] "RemoveContainer" containerID="fa3cd56c0eed3942747ccd430bf922d53e92971c2b796f79bba7506d1c4e56fb"
Oct 08 08:29:53 crc kubenswrapper[4693]: E1008 08:29:53.364756 4693 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xwrvr_openshift-machine-config-operator(adebc2b6-0bd4-4c1c-8b8f-68a98012f490)\"" pod="openshift-machine-config-operator/machine-config-daemon-xwrvr" podUID="adebc2b6-0bd4-4c1c-8b8f-68a98012f490"
Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.159127 4693 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr"]
Oct 08 08:30:00 crc kubenswrapper[4693]: E1008 08:30:00.159864 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" containerName="registry-server"
Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.159881 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" containerName="registry-server"
Oct 08 08:30:00 crc kubenswrapper[4693]: E1008 08:30:00.159903 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8598397-df5e-4a4b-ac22-4348239ab87f" containerName="gather"
Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.159911 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8598397-df5e-4a4b-ac22-4348239ab87f" containerName="gather"
Oct 08 08:30:00 crc kubenswrapper[4693]: E1008 08:30:00.159948 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" containerName="extract-content"
Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.159957 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" containerName="extract-content"
podUID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" containerName="extract-content" Oct 08 08:30:00 crc kubenswrapper[4693]: E1008 08:30:00.159969 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" containerName="extract-utilities" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.159977 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" containerName="extract-utilities" Oct 08 08:30:00 crc kubenswrapper[4693]: E1008 08:30:00.159989 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8598397-df5e-4a4b-ac22-4348239ab87f" containerName="copy" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.159997 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8598397-df5e-4a4b-ac22-4348239ab87f" containerName="copy" Oct 08 08:30:00 crc kubenswrapper[4693]: E1008 08:30:00.160023 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c61b4c-db85-4686-8814-f74ad46ddf5e" containerName="registry-server" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.160031 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c61b4c-db85-4686-8814-f74ad46ddf5e" containerName="registry-server" Oct 08 08:30:00 crc kubenswrapper[4693]: E1008 08:30:00.160048 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c61b4c-db85-4686-8814-f74ad46ddf5e" containerName="extract-utilities" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.160056 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c61b4c-db85-4686-8814-f74ad46ddf5e" containerName="extract-utilities" Oct 08 08:30:00 crc kubenswrapper[4693]: E1008 08:30:00.160085 4693 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c61b4c-db85-4686-8814-f74ad46ddf5e" containerName="extract-content" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.160093 4693 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c61b4c-db85-4686-8814-f74ad46ddf5e" containerName="extract-content" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.160315 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8598397-df5e-4a4b-ac22-4348239ab87f" containerName="copy" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.160329 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="81c61b4c-db85-4686-8814-f74ad46ddf5e" containerName="registry-server" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.160355 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="f01b6ca9-fc04-4eb5-8020-173ba1ecd9fc" containerName="registry-server" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.160377 4693 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8598397-df5e-4a4b-ac22-4348239ab87f" containerName="gather" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.161299 4693 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.164222 4693 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.164337 4693 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.203991 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr"] Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.293497 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc-config-volume\") pod \"collect-profiles-29331870-wtgbr\" (UID: \"4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.293604 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pbdz\" (UniqueName: \"kubernetes.io/projected/4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc-kube-api-access-6pbdz\") pod \"collect-profiles-29331870-wtgbr\" (UID: \"4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.293762 4693 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc-secret-volume\") pod \"collect-profiles-29331870-wtgbr\" (UID: \"4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.396625 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc-config-volume\") pod \"collect-profiles-29331870-wtgbr\" (UID: \"4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.396870 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pbdz\" (UniqueName: \"kubernetes.io/projected/4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc-kube-api-access-6pbdz\") pod \"collect-profiles-29331870-wtgbr\" (UID: \"4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.397016 4693 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc-secret-volume\") pod \"collect-profiles-29331870-wtgbr\" (UID: \"4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.398681 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc-config-volume\") pod 
\"collect-profiles-29331870-wtgbr\" (UID: \"4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.409558 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc-secret-volume\") pod \"collect-profiles-29331870-wtgbr\" (UID: \"4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.428647 4693 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pbdz\" (UniqueName: \"kubernetes.io/projected/4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc-kube-api-access-6pbdz\") pod \"collect-profiles-29331870-wtgbr\" (UID: \"4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" Oct 08 08:30:00 crc kubenswrapper[4693]: I1008 08:30:00.501536 4693 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" Oct 08 08:30:01 crc kubenswrapper[4693]: I1008 08:30:01.040104 4693 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr"] Oct 08 08:30:01 crc kubenswrapper[4693]: I1008 08:30:01.104252 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" event={"ID":"4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc","Type":"ContainerStarted","Data":"53e20bec94b3a647fd1a5623df250ae7aff33634df4760ad090bd20ff97bb375"} Oct 08 08:30:02 crc kubenswrapper[4693]: I1008 08:30:02.121709 4693 generic.go:334] "Generic (PLEG): container finished" podID="4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc" containerID="7ae6861992c3b91b6b3d71f65c23527fa608e78709ecdc0b00add001af4a0644" exitCode=0 Oct 08 08:30:02 crc kubenswrapper[4693]: I1008 08:30:02.122168 4693 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331870-wtgbr" event={"ID":"4ce4e367-2a1e-4e9a-b8be-6e1adebb61fc","Type":"ContainerDied","Data":"7ae6861992c3b91b6b3d71f65c23527fa608e78709ecdc0b00add001af4a0644"} var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515071420421024441 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015071420421017356 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015071407415016511 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015071407415015461 5ustar corecore